-rw-r--r--  LICENSE  3
-rw-r--r--  tools/scons/scons-LICENSE  25
-rw-r--r--  tools/scons/scons-README  204
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Action.py  1147
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Builder.py  844
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/CacheDir.py  217
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Conftest.py  778
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Debug.py  216
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Defaults.py  463
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Environment.py  2300
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Errors.py  198
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Executor.py  393
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Job.py  429
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Memoize.py  286
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Node/Alias.py  147
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Node/FS.py  3075
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Node/Python.py  119
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Node/__init__.py  1330
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Options/BoolOption.py  44
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Options/EnumOption.py  44
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Options/ListOption.py  44
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Options/PackageOption.py  44
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Options/PathOption.py  70
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Options/__init__.py  68
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/PathList.py  226
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/__init__.py  216
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/aix.py  65
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/cygwin.py  49
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/darwin.py  40
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/hpux.py  40
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/irix.py  38
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/os2.py  49
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/posix.py  258
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/sunos.py  44
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Platform/win32.py  324
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/SConf.py  1012
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/SConsign.py  375
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/C.py  126
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/D.py  68
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/Dir.py  105
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/Fortran.py  314
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/IDL.py  42
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/LaTeX.py  334
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/Prog.py  97
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/RC.py  49
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Scanner/__init__.py  406
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Script/Interactive.py  376
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Script/Main.py  1321
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Script/SConsOptions.py  940
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Script/SConscript.py  632
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Script/__init__.py  408
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Sig.py  57
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Subst.py  884
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Taskmaster.py  985
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/386asm.py  55
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/BitKeeper.py  59
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/CVS.py  67
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/FortranCommon.py  241
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/JavaCommon.py  317
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/Perforce.py  98
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/PharLapCommon.py  132
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/RCS.py  58
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/SCCS.py  58
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/Subversion.py  65
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/__init__.py  667
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/aixc++.py  76
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/aixcc.py  68
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/aixf77.py  74
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/aixlink.py  70
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/applelink.py  65
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/ar.py  57
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/as.py  72
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/bcc32.py  76
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/c++.py  93
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/cc.py  108
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/cvf.py  52
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/default.py  44
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/dmd.py  218
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/dvi.py  58
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/dvipdf.py  119
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/dvips.py  88
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/f77.py  56
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/f90.py  56
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/f95.py  57
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/filesystem.py  92
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/fortran.py  57
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/g++.py  84
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/g77.py  67
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/gas.py  47
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/gcc.py  74
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/gfortran.py  58
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/gnulink.py  57
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/gs.py  75
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/hpc++.py  79
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/hpcc.py  47
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/hplink.py  71
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/icc.py  53
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/icl.py  46
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/ifl.py  66
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/ifort.py  83
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/ilink.py  53
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/ilink32.py  54
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/install.py  223
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/intelc.py  482
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/jar.py  104
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/javac.py  228
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/javah.py  132
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/latex.py  76
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/lex.py  93
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/link.py  112
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/linkloc.py  105
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/m4.py  57
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/masm.py  71
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/midl.py  90
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/mingw.py  151
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/mslib.py  76
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/mslink.py  249
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/msvc.py  766
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/msvs.py  1815
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/mwcc.py  202
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/mwld.py  101
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/nasm.py  66
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/__init__.py  306
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/ipk.py  179
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/msi.py  521
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/rpm.py  362
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_tarbz2.py  37
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_targz.py  37
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_zip.py  37
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/tarbz2.py  38
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/targz.py  38
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/packaging/zip.py  38
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/pdf.py  72
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/pdflatex.py  75
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/pdftex.py  99
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/qt.py  330
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/rmic.py  115
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/rpcgen.py  64
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/rpm.py  126
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sgiar.py  62
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sgic++.py  52
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sgicc.py  47
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sgilink.py  57
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sunar.py  61
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sunc++.py  100
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/suncc.py  52
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sunf77.py  57
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sunf90.py  58
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sunf95.py  58
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/sunlink.py  71
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/swig.py  118
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/tar.py  67
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/tex.py  661
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/tlib.py  47
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/wix.py  94
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/yacc.py  125
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Tool/zip.py  94
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Util.py  1577
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Variables/BoolVariable.py  85
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Variables/EnumVariable.py  101
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Variables/ListVariable.py  133
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Variables/PackageVariable.py  103
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Variables/PathVariable.py  141
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Variables/__init__.py  304
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/Warnings.py  193
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/__init__.py  43
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/__init__.py  244
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_UserString.py  92
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_hashlib.py  85
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_itertools.py  118
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_optparse.py  1719
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_sets.py  577
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_sets15.py  170
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_shlex.py  319
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_subprocess.py  1290
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/_scons_textwrap.py  376
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/compat/builtins.py  181
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/cpp.py  592
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/dblite.py  219
-rw-r--r--  tools/scons/scons-local-1.2.0/SCons/exitfuncs.py  71
-rwxr-xr-x  tools/scons/scons-time.py  1513
-rwxr-xr-x  tools/scons/scons.py  165
-rwxr-xr-x  tools/scons/sconsign.py  502
183 files changed, 47322 insertions, 0 deletions
diff --git a/LICENSE b/LICENSE
index 867f903e3..321be0efd 100644
--- a/LICENSE
+++ b/LICENSE
@@ -38,6 +38,9 @@ The externally maintained libraries used by Node are:
- WAF build system, located at tools/waf. WAF is copyright Thomas Nagy,
and released under the MIT license.
+ - The SCONS build system, located at tools/scons. SCONS is copyright
+ the SCONS Foundation and released under the MIT license.
+
- C-Ares, an asynchronous DNS client, located at deps/c-ares. C-Ares is
copyright the Massachusetts Institute of Technology, authored by
Greg Hudson, Daniel Stenberg and others, and released under the MIT
diff --git a/tools/scons/scons-LICENSE b/tools/scons/scons-LICENSE
new file mode 100644
index 000000000..804ed8682
--- /dev/null
+++ b/tools/scons/scons-LICENSE
@@ -0,0 +1,25 @@
+ Copyright and license for SCons - a software construction tool
+
+ This copyright and license do not apply to any other software
+ with which this software may have been included.
+
+Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/tools/scons/scons-README b/tools/scons/scons-README
new file mode 100644
index 000000000..298a22168
--- /dev/null
+++ b/tools/scons/scons-README
@@ -0,0 +1,204 @@
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+
+ SCons - a software construction tool
+
+This is the scons-README file for a version of SCons packaged for local
+execution--that is, execution out of a specific local directory, without
+having to install SCons as a system-wide utility.
+
+You are likely reading this file in one of the following two situations:
+
+ 1) You have unpacked an scons-local-{version} package and are
+ examining the contents.
+
+ In this case, you are presumably interested in using this
+ package to include a local copy of SCons with some other
+ software that you package, so that you can use SCons to build
+ your software without forcing all of your users to have it fully
+ installed. Instructions for this can be found below.
+
+ If you are not looking to use SCons in this way, then please
+ use either the scons-{version} package to install SCons on your
+ system, or the scons-src-{version} package if you want the full
+ source to SCons, including its packaging code and underlying
+ tests and testing infrastructure.
+
+ 2) This file was included in some other software package so that
+ the package could be built using SCons.
+
+ In this case, follow the instructions provided with the
+ rest of the software package for how to use SCons to build
+ and/or install the software. The file containing build and
+ installation instructions will typically be named README or
+ INSTALL.
+
+LATEST VERSION
+==============
+
+Before going further, you can check for the latest version of the
+scons-local package, or any SCons package, at the SCons download page:
+
+ http://www.scons.org/download.html
+
+
+EXECUTION REQUIREMENTS
+======================
+
+Running SCons requires Python version 1.5.2 or later. There should be
+no other dependencies or requirements to run SCons.
+
+The default SCons configuration assumes use of the Microsoft Visual C++
+compiler suite on WIN32 systems, and assumes a C compiler named 'cc',
+a C++ compiler named 'c++', and a Fortran compiler named 'g77' (such
+as found in the GNU C compiler suite) on any other type of system.
+You may, of course, override these default values by appropriate
+configuration of Environment construction variables.
+
+
+INSTALLATION
+============
+
+Installation of this package should be as simple as unpacking the
+archive (either .tar.gz or .zip) in any directory (top-level or a
+subdirectory) within the software package with which you want to ship
+SCons.
+
+Once you have installed this package, you should write an SConstruct
+file at the top level of your source tree to build your software as you
+see fit.
+
+Then modify the build/install instructions for your package to instruct
+your users to execute SCons as follows (if you installed this package in
+your top-level directory):
+
+ $ python scons.py
+
+Or (if, for example, you installed this package in a subdirectory named
+"scons"):
+
+ $ python scons/scons.py
+
+That should be all you have to do. (If it isn't that simple, please let
+us know!)
+
+
+CONTENTS OF THIS PACKAGE
+========================
+
+This scons-local package consists of the following:
+
+scons-LICENSE
+ A copy of the copyright and terms under which SCons is
+ distributed (the Open Source Initiative-approved MIT license).
+
+ A disclaimer has been added to the beginning to make clear that
+ this license applies only to SCons, and not to any separate
+ software you've written with which you're planning to package
+ SCons.
+
+scons-README
+ What you're looking at right now.
+
+scons-local-{version}/
+ The SCons build engine. This is structured as a Python
+ library.
+
+scons.py
+ The SCons script itself. The script sets up the Python
+ sys.path variable to use the build engine found in the
+ scons-local-{version}/ directory in preference to any other
+ SCons build engine installed on your system.
+
+
+DOCUMENTATION
+=============
+
+Because this package is intended to be included with other software by
+experienced users, we have not included any SCons documentation in this
+package (other than this scons-README file you're reading right now).
+
+If, however, you need documentation about SCons, then consult any of the
+following from the corresponding scons-{version} or scons-src-{version}
+package:
+
+ The RELEASE.txt file (src/RELEASE.txt file in the
+ scons-src-{version} package), which contains notes about this
+ specific release, including known problems.
+
+ The CHANGES.txt file (src/CHANGES.txt file in the
+ scons-src-{version} package), which contains a list of changes
+ since the previous release.
+
+ The scons.1 man page (doc/man/scons.1 in the scons-src-{version}
+ package), which contains a section of small examples for getting
+ started using SCons.
+
+Additional documentation for SCons is available at:
+
+ http://www.scons.org/doc.html
+
+
+LICENSING
+=========
+
+SCons is distributed under the MIT license, a full copy of which is
+available in the scons-LICENSE file in this package. The MIT license is
+an approved Open Source license, which means:
+
+ This software is OSI Certified Open Source Software. OSI
+ Certified is a certification mark of the Open Source Initiative.
+
+More information about OSI certifications and Open Source software is
+available at:
+
+ http://www.opensource.org/
+
+
+REPORTING BUGS
+==============
+
+You can report bugs either by following the "Tracker - Bugs" link
+on the SCons project page:
+
+ http://sourceforge.net/projects/scons/
+
+or by sending mail to the SCons developers mailing list:
+
+ scons-devel@lists.sourceforge.net
+
+
+MAILING LISTS
+=============
+
+A mailing list for users of SCons is available. You may send questions
+or comments to the list at:
+
+ scons-users@lists.sourceforge.net
+
+You may subscribe to the scons-users mailing list at:
+
+ http://lists.sourceforge.net/lists/listinfo/scons-users
+
+
+FOR MORE INFORMATION
+====================
+
+Check the SCons web site at:
+
+ http://www.scons.org/
+
+
+AUTHOR INFO
+===========
+
+Steven Knight
+knight at baldmt dot com
+http://www.baldmt.com/~knight/
+
+With plenty of help from the SCons Development team:
+ Chad Austin
+ Charles Crain
+ Steve Leblanc
+ Anthony Roach
+ Terrel Shumway
+
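The README above asks package authors to drop an SConstruct file at the top of their source tree and point users at the bundled scons.py. As a rough illustration only (the program and source names below are hypothetical and not part of this commit), an SConstruct is ordinary Python that the packaged build engine evaluates:

    # SConstruct -- hypothetical example
    env = Environment()                    # construction environment with default tools
    env.Program('hello', ['hello.c'])      # build the 'hello' executable from hello.c

With that file in place, a user runs the copy shipped in the tree rather than a system-wide SCons install, e.g. "python scons/scons.py" as shown above, or "python tools/scons/scons.py" for the layout added by this commit.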
diff --git a/tools/scons/scons-local-1.2.0/SCons/Action.py b/tools/scons/scons-local-1.2.0/SCons/Action.py
new file mode 100644
index 000000000..d740de66d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Action.py
@@ -0,0 +1,1147 @@
+"""SCons.Action
+
+This encapsulates information about executing any sort of action that
+can build one or more target Nodes (typically files) from one or more
+source Nodes (also typically files) given a specific Environment.
+
+The base class here is ActionBase. The base class supplies just a few
+OO utility methods and some generic methods for displaying information
+about an Action in response to the various commands that control printing.
+
+A second-level base class is _ActionAction. This extends ActionBase
+by providing the methods that can be used to show and perform an
+action. True Action objects will subclass _ActionAction; Action
+factory class objects will subclass ActionBase.
+
+The heavy lifting is handled by subclasses for the different types of
+actions we might execute:
+
+ CommandAction
+ CommandGeneratorAction
+ FunctionAction
+ ListAction
+
+The subclasses supply the following public interface methods used by
+other modules:
+
+ __call__()
+ THE public interface, "calling" an Action object executes the
+ command or Python function. This also takes care of printing
+ a pre-substitution command for debugging purposes.
+
+ get_contents()
+ Fetches the "contents" of an Action for signature calculation
+ plus the varlist. This is what gets MD5 checksummed to decide
+ if a target needs to be rebuilt because its action changed.
+
+ genstring()
+ Returns a string representation of the Action *without*
+ command substitution, but allows a CommandGeneratorAction to
+ generate the right action based on the specified target,
+ source and env. This is used by the Signature subsystem
+ (through the Executor) to obtain an (imprecise) representation
+ of the Action operation for informative purposes.
+
+
+Subclasses also supply the following methods for internal use within
+this module:
+
+ __str__()
+ Returns a string approximation of the Action; no variable
+ substitution is performed.
+
+ execute()
+ The internal method that really, truly, actually handles the
+ execution of a command or Python function. This is used so
+ that the __call__() methods can take care of displaying any
+ pre-substitution representations, and *then* execute an action
+ without worrying about the specific Actions involved.
+
+ get_presig()
+ Fetches the "contents" of a subclass for signature calculation.
+ The varlist is added to this to produce the Action's contents.
+
+ strfunction()
+ Returns a substituted string representation of the Action.
+ This is used by the _ActionAction.show() command to display the
+ command/function that will be executed to generate the target(s).
+
+There is a related independent ActionCaller class that looks like a
+regular Action, and which serves as a wrapper for arbitrary functions
+that we want to let the user specify the arguments to now, but actually
+execute later (when an out-of-date check determines that it needs to
+be executed, for example). Objects of this class are returned by an
+ActionFactory class that provides a __call__() method as a convenient
+way for wrapping up the functions.
+
+"""
+
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+__revision__ = "src/engine/SCons/Action.py 3842 2008/12/20 22:59:52 scons"
+
+import cPickle
+import dis
+import os
+import string
+import sys
+import subprocess
+
+from SCons.Debug import logInstanceCreation
+import SCons.Errors
+import SCons.Executor
+import SCons.Util
+import SCons.Subst
+
+# we use these a lot, so try to optimize them
+is_String = SCons.Util.is_String
+is_List = SCons.Util.is_List
+
+class _null:
+ pass
+
+print_actions = 1
+execute_actions = 1
+print_actions_presub = 0
+
+def rfile(n):
+ try:
+ return n.rfile()
+ except AttributeError:
+ return n
+
+def default_exitstatfunc(s):
+ return s
+
+try:
+ SET_LINENO = dis.SET_LINENO
+ HAVE_ARGUMENT = dis.HAVE_ARGUMENT
+except AttributeError:
+ remove_set_lineno_codes = lambda x: x
+else:
+ def remove_set_lineno_codes(code):
+ result = []
+ n = len(code)
+ i = 0
+ while i < n:
+ c = code[i]
+ op = ord(c)
+ if op >= HAVE_ARGUMENT:
+ if op != SET_LINENO:
+ result.append(code[i:i+3])
+ i = i+3
+ else:
+ result.append(c)
+ i = i+1
+ return string.join(result, '')
+
+
+def _callable_contents(obj):
+ """Return the signature contents of a callable Python object.
+ """
+ try:
+ # Test if obj is a method.
+ return _function_contents(obj.im_func)
+
+ except AttributeError:
+ try:
+ # Test if obj is a callable object.
+ return _function_contents(obj.__call__.im_func)
+
+ except AttributeError:
+ try:
+ # Test if obj is a code object.
+ return _code_contents(obj)
+
+ except AttributeError:
+ # Test if obj is a function object.
+ return _function_contents(obj)
+
+
+def _object_contents(obj):
+ """Return the signature contents of any Python object.
+
+ We have to handle the case where object contains a code object
+ since it can be pickled directly.
+ """
+ try:
+ # Test if obj is a method.
+ return _function_contents(obj.im_func)
+
+ except AttributeError:
+ try:
+ # Test if obj is a callable object.
+ return _function_contents(obj.__call__.im_func)
+
+ except AttributeError:
+ try:
+ # Test if obj is a code object.
+ return _code_contents(obj)
+
+ except AttributeError:
+ try:
+ # Test if obj is a function object.
+ return _function_contents(obj)
+
+ except AttributeError:
+ # Should be a picklable Python object.
+ try:
+ return cPickle.dumps(obj)
+ except (cPickle.PicklingError, TypeError):
+ # This is weird, but it seems that nested classes
+ # are unpicklable. The Python docs say it should
+ # always be a PicklingError, but some Python
+ # versions seem to return TypeError. Just do
+ # the best we can.
+ return str(obj)
+
+
+def _code_contents(code):
+ """Return the signature contents of a code object.
+
+ By providing direct access to the code object of the
+ function, Python makes this extremely easy. Hooray!
+
+ Unfortunately, older versions of Python include line
+ number indications in the compiled byte code. Boo!
+ So we remove the line number byte codes to prevent
+ recompilations from moving a Python function.
+ """
+
+ contents = []
+
+ # The code contents depends on the number of local variables
+ # but not their actual names.
+ contents.append("%s,%s" % (code.co_argcount, len(code.co_varnames)))
+ try:
+ contents.append(",%s,%s" % (len(code.co_cellvars), len(code.co_freevars)))
+ except AttributeError:
+ # Older versions of Python do not support closures.
+ contents.append(",0,0")
+
+ # The code contents depends on any constants accessed by the
+ # function. Note that we have to call _object_contents on each
+ # constant because the code object of nested functions can
+ # show up among the constants.
+ #
+ # Note that we also always ignore the first entry of co_consts
+ # which contains the function doc string. We assume that the
+ # function does not access its doc string.
+ contents.append(',(' + string.join(map(_object_contents,code.co_consts[1:]),',') + ')')
+
+ # The code contents depends on the variable names used to
+ # access global variables, as changing the variable name changes
+ # the variable actually accessed and therefore changes the
+ # function result.
+ contents.append(',(' + string.join(map(_object_contents,code.co_names),',') + ')')
+
+
+ # The code contents depends on its actual code!!!
+ contents.append(',(' + str(remove_set_lineno_codes(code.co_code)) + ')')
+
+ return string.join(contents, '')
+
+
+def _function_contents(func):
+ """Return the signature contents of a function."""
+
+ contents = [_code_contents(func.func_code)]
+
+ # The function contents depends on the value of default arguments
+ if func.func_defaults:
+ contents.append(',(' + string.join(map(_object_contents,func.func_defaults),',') + ')')
+ else:
+ contents.append(',()')
+
+ # The function contents depends on the closure captured cell values.
+ try:
+ closure = func.func_closure or []
+ except AttributeError:
+ # Older versions of Python do not support closures.
+ closure = []
+
+ #xxx = [_object_contents(x.cell_contents) for x in closure]
+ try:
+ xxx = map(lambda x: _object_contents(x.cell_contents), closure)
+ except AttributeError:
+ xxx = []
+ contents.append(',(' + string.join(xxx, ',') + ')')
+
+ return string.join(contents, '')
+
+
+def _actionAppend(act1, act2):
+ # This function knows how to slap two actions together.
+ # Mainly, it handles ListActions by concatenating into
+ # a single ListAction.
+ a1 = Action(act1)
+ a2 = Action(act2)
+ if a1 is None or a2 is None:
+ raise TypeError, "Cannot append %s to %s" % (type(act1), type(act2))
+ if isinstance(a1, ListAction):
+ if isinstance(a2, ListAction):
+ return ListAction(a1.list + a2.list)
+ else:
+ return ListAction(a1.list + [ a2 ])
+ else:
+ if isinstance(a2, ListAction):
+ return ListAction([ a1 ] + a2.list)
+ else:
+ return ListAction([ a1, a2 ])
+
+def _do_create_keywords(args, kw):
+ """This converts any arguments after the action argument into
+ their equivalent keywords and adds them to the kw argument.
+ """
+ v = kw.get('varlist', ())
+ # prevent varlist="FOO" from being interpreted as ['F', 'O', 'O']
+ if is_String(v): v = (v,)
+ kw['varlist'] = tuple(v)
+ if args:
+ # turn positional args into equivalent keywords
+ cmdstrfunc = args[0]
+ if cmdstrfunc is None or is_String(cmdstrfunc):
+ kw['cmdstr'] = cmdstrfunc
+ elif callable(cmdstrfunc):
+ kw['strfunction'] = cmdstrfunc
+ else:
+ raise SCons.Errors.UserError(
+ 'Invalid command display variable type. '
+ 'You must either pass a string or a callback which '
+ 'accepts (target, source, env) as parameters.')
+ if len(args) > 1:
+ kw['varlist'] = args[1:] + kw['varlist']
+ if kw.get('strfunction', _null) is not _null \
+ and kw.get('cmdstr', _null) is not _null:
+ raise SCons.Errors.UserError(
+ 'Cannot have both strfunction and cmdstr args to Action()')
+
+def _do_create_action(act, kw):
+ """This is the actual "implementation" for the
+ Action factory method, below. This handles the
+ fact that passing lists to Action() itself has
+ different semantics than passing lists as elements
+ of lists.
+
+ The former will create a ListAction, the latter
+ will create a CommandAction by converting the inner
+ list elements to strings."""
+
+ if isinstance(act, ActionBase):
+ return act
+
+ if is_List(act):
+ #TODO(1.5) return CommandAction(act, **kw)
+ return apply(CommandAction, (act,), kw)
+
+ if callable(act):
+ try:
+ gen = kw['generator']
+ del kw['generator']
+ except KeyError:
+ gen = 0
+ if gen:
+ action_type = CommandGeneratorAction
+ else:
+ action_type = FunctionAction
+ return action_type(act, kw)
+
+ if is_String(act):
+ var=SCons.Util.get_environment_var(act)
+ if var:
+ # This looks like a string that is purely an Environment
+ # variable reference, like "$FOO" or "${FOO}". We do
+ # something special here...we lazily evaluate the contents
+ # of that Environment variable, so a user could put something
+ # like a function or a CommandGenerator in that variable
+ # instead of a string.
+ return LazyAction(var, kw)
+ commands = string.split(str(act), '\n')
+ if len(commands) == 1:
+ #TODO(1.5) return CommandAction(commands[0], **kw)
+ return apply(CommandAction, (commands[0],), kw)
+ # The list of string commands may include a LazyAction, so we
+ # reprocess them via _do_create_list_action.
+ return _do_create_list_action(commands, kw)
+ return None
+
+def _do_create_list_action(act, kw):
+ """A factory for list actions. Convert the input list into Actions
+ and then wrap them in a ListAction."""
+ acts = []
+ for a in act:
+ aa = _do_create_action(a, kw)
+ if aa is not None: acts.append(aa)
+ if not acts:
+ return None
+ elif len(acts) == 1:
+ return acts[0]
+ else:
+ return ListAction(acts)
+
+def Action(act, *args, **kw):
+ """A factory for action objects."""
+ # Really simple: the _do_create_* routines do the heavy lifting.
+ _do_create_keywords(args, kw)
+ if is_List(act):
+ return _do_create_list_action(act, kw)
+ return _do_create_action(act, kw)
+
+class ActionBase:
+ """Base class for all types of action objects that can be held by
+ other objects (Builders, Executors, etc.) This provides the
+ common methods for manipulating and combining those actions."""
+
+ def __cmp__(self, other):
+ return cmp(self.__dict__, other)
+
+ def genstring(self, target, source, env):
+ return str(self)
+
+ def get_contents(self, target, source, env):
+ result = [ self.get_presig(target, source, env) ]
+ # This should never happen, as the Action() factory should wrap
+ # the varlist, but just in case an action is created directly,
+ # we duplicate this check here.
+ vl = self.varlist
+ if is_String(vl): vl = (vl,)
+ for v in vl:
+ result.append(env.subst('${'+v+'}'))
+ return string.join(result, '')
+
+ def __add__(self, other):
+ return _actionAppend(self, other)
+
+ def __radd__(self, other):
+ return _actionAppend(other, self)
+
+ def presub_lines(self, env):
+ # CommandGeneratorAction needs a real environment
+ # in order to return the proper string here, since
+ # it may call LazyAction, which looks up a key
+ # in that env. So we temporarily remember the env here,
+ # and CommandGeneratorAction will use this env
+ # when it calls its _generate method.
+ self.presub_env = env
+ lines = string.split(str(self), '\n')
+ self.presub_env = None # don't need this any more
+ return lines
+
+ def get_executor(self, env, overrides, tlist, slist, executor_kw):
+ """Return the Executor for this Action."""
+ return SCons.Executor.Executor(self, env, overrides,
+ tlist, slist, executor_kw)
+
+class _ActionAction(ActionBase):
+ """Base class for actions that create output objects."""
+ def __init__(self, cmdstr=_null, strfunction=_null, varlist=(),
+ presub=_null, chdir=None, exitstatfunc=None,
+ **kw):
+ self.cmdstr = cmdstr
+ if strfunction is not _null:
+ if strfunction is None:
+ self.cmdstr = None
+ else:
+ self.strfunction = strfunction
+ self.varlist = varlist
+ self.presub = presub
+ self.chdir = chdir
+ if not exitstatfunc:
+ exitstatfunc = default_exitstatfunc
+ self.exitstatfunc = exitstatfunc
+
+ def print_cmd_line(self, s, target, source, env):
+ sys.stdout.write(s + "\n")
+
+ def __call__(self, target, source, env,
+ exitstatfunc=_null,
+ presub=_null,
+ show=_null,
+ execute=_null,
+ chdir=_null):
+ if not is_List(target):
+ target = [target]
+ if not is_List(source):
+ source = [source]
+
+ if presub is _null:
+ presub = self.presub
+ if presub is _null:
+ presub = print_actions_presub
+ if exitstatfunc is _null: exitstatfunc = self.exitstatfunc
+ if show is _null: show = print_actions
+ if execute is _null: execute = execute_actions
+ if chdir is _null: chdir = self.chdir
+ save_cwd = None
+ if chdir:
+ save_cwd = os.getcwd()
+ try:
+ chdir = str(chdir.abspath)
+ except AttributeError:
+ if not is_String(chdir):
+ chdir = str(target[0].dir)
+ if presub:
+ t = string.join(map(str, target), ' and ')
+ l = string.join(self.presub_lines(env), '\n ')
+ out = "Building %s with action:\n %s\n" % (t, l)
+ sys.stdout.write(out)
+ cmd = None
+ if show and self.strfunction:
+ cmd = self.strfunction(target, source, env)
+ if cmd:
+ if chdir:
+ cmd = ('os.chdir(%s)\n' % repr(chdir)) + cmd
+ try:
+ get = env.get
+ except AttributeError:
+ print_func = self.print_cmd_line
+ else:
+ print_func = get('PRINT_CMD_LINE_FUNC')
+ if not print_func:
+ print_func = self.print_cmd_line
+ print_func(cmd, target, source, env)
+ stat = 0
+ if execute:
+ if chdir:
+ os.chdir(chdir)
+ try:
+ stat = self.execute(target, source, env)
+ if isinstance(stat, SCons.Errors.BuildError):
+ s = exitstatfunc(stat.status)
+ if s:
+ stat.status = s
+ else:
+ stat = s
+ else:
+ stat = exitstatfunc(stat)
+ finally:
+ if save_cwd:
+ os.chdir(save_cwd)
+ if cmd and save_cwd:
+ print_func('os.chdir(%s)' % repr(save_cwd), target, source, env)
+
+ return stat
+
+
+def _string_from_cmd_list(cmd_list):
+ """Takes a list of command line arguments and returns a pretty
+ representation for printing."""
+ cl = []
+ for arg in map(str, cmd_list):
+ if ' ' in arg or '\t' in arg:
+ arg = '"' + arg + '"'
+ cl.append(arg)
+ return string.join(cl)
+
+# A fiddlin' little function that has an 'import SCons.Environment' which
+# can't be moved to the top level without creating an import loop. Since
+# this import creates a local variable named 'SCons', it blocks access to
+# the global variable, so we move it here to prevent complaints about local
+# variables being used uninitialized.
+default_ENV = None
+def get_default_ENV(env):
+ global default_ENV
+ try:
+ return env['ENV']
+ except KeyError:
+ if not default_ENV:
+ import SCons.Environment
+ # This is a hideously expensive way to get a default shell
+ # environment. What it really should do is run the platform
+ # setup to get the default ENV. Fortunately, it's incredibly
+ # rare for an Environment not to have a shell environment, so
+ # we're not going to worry about it overmuch.
+ default_ENV = SCons.Environment.Environment()['ENV']
+ return default_ENV
+
+# This function is still in draft mode. We're going to need something like
+# it in the long run as more and more places use subprocess, but I'm sure
+# it'll have to be tweaked to get the full desired functionality.
+# one special arg (so far?), 'error', to tell what to do with exceptions.
+def _subproc(env, cmd, error = 'ignore', **kw):
+ """Do common setup for a subprocess.Popen() call"""
+ # allow std{in,out,err} to be "'devnull'"
+ io = kw.get('stdin')
+ if is_String(io) and io == 'devnull':
+ kw['stdin'] = open(os.devnull)
+ io = kw.get('stdout')
+ if is_String(io) and io == 'devnull':
+ kw['stdout'] = open(os.devnull, 'w')
+ io = kw.get('stderr')
+ if is_String(io) and io == 'devnull':
+ kw['stderr'] = open(os.devnull, 'w')
+
+ # Figure out what shell environment to use
+ ENV = kw.get('env', None)
+ if ENV is None: ENV = get_default_ENV(env)
+
+ # Ensure that the ENV values are all strings:
+ new_env = {}
+ for key, value in ENV.items():
+ if is_List(value):
+ # If the value is a list, then we assume it is a path list,
+ # because that's a pretty common list-like value to stick
+ # in an environment variable:
+ value = SCons.Util.flatten_sequence(value)
+ new_env[key] = string.join(map(str, value), os.pathsep)
+ else:
+ # It's either a string or something else. If it's a string,
+ # we still want to call str() because it might be a *Unicode*
+ # string, which makes subprocess.Popen() gag. If it isn't a
+ # string or a list, then we just coerce it to a string, which
+ # is the proper way to handle Dir and File instances and will
+ # produce something reasonable for just about everything else:
+ new_env[key] = str(value)
+ kw['env'] = new_env
+
+ try:
+ #FUTURE return subprocess.Popen(cmd, **kw)
+ return apply(subprocess.Popen, (cmd,), kw)
+ except EnvironmentError, e:
+ if error == 'raise': raise
+ # return a dummy Popen instance that only returns error
+ class dummyPopen:
+ def __init__(self, e): self.exception = e
+ def communicate(self): return ('','')
+ def wait(self): return -self.exception.errno
+ stdin = None
+ class f:
+ def read(self): return ''
+ def readline(self): return ''
+ stdout = stderr = f()
+ return dummyPopen(e)
+
+class CommandAction(_ActionAction):
+ """Class for command-execution actions."""
+ def __init__(self, cmd, **kw):
+ # Cmd can actually be a list or a single item; if it's a
+ # single item it should be the command string to execute; if a
+ # list then it should be the words of the command string to
+ # execute. Only a single command should be executed by this
+ # object; lists of commands should be handled by embedding
+ # these objects in a ListAction object (which the Action()
+ # factory above does). cmd will be passed to
+ # Environment.subst_list() for substituting environment
+ # variables.
+ if __debug__: logInstanceCreation(self, 'Action.CommandAction')
+
+ #TODO(1.5) _ActionAction.__init__(self, **kw)
+ apply(_ActionAction.__init__, (self,), kw)
+ if is_List(cmd):
+ if filter(is_List, cmd):
+ raise TypeError, "CommandAction should be given only " \
+ "a single command"
+ self.cmd_list = cmd
+
+ def __str__(self):
+ if is_List(self.cmd_list):
+ return string.join(map(str, self.cmd_list), ' ')
+ return str(self.cmd_list)
+
+ def process(self, target, source, env):
+ result = env.subst_list(self.cmd_list, 0, target, source)
+ silent = None
+ ignore = None
+ while 1:
+ try: c = result[0][0][0]
+ except IndexError: c = None
+ if c == '@': silent = 1
+ elif c == '-': ignore = 1
+ else: break
+ result[0][0] = result[0][0][1:]
+ try:
+ if not result[0][0]:
+ result[0] = result[0][1:]
+ except IndexError:
+ pass
+ return result, ignore, silent
+
+ def strfunction(self, target, source, env):
+ if self.cmdstr is None:
+ return None
+ if self.cmdstr is not _null:
+ from SCons.Subst import SUBST_RAW
+ c = env.subst(self.cmdstr, SUBST_RAW, target, source)
+ if c:
+ return c
+ cmd_list, ignore, silent = self.process(target, source, env)
+ if silent:
+ return ''
+ return _string_from_cmd_list(cmd_list[0])
+
+ def execute(self, target, source, env):
+ """Execute a command action.
+
+ This will handle lists of commands as well as individual commands,
+ because construction variable substitution may turn a single
+ "command" into a list. This means that this class can actually
+ handle lists of commands, even though that's not how we use it
+ externally.
+ """
+ escape_list = SCons.Subst.escape_list
+ flatten_sequence = SCons.Util.flatten_sequence
+
+ try:
+ shell = env['SHELL']
+ except KeyError:
+ raise SCons.Errors.UserError('Missing SHELL construction variable.')
+
+ try:
+ spawn = env['SPAWN']
+ except KeyError:
+ raise SCons.Errors.UserError('Missing SPAWN construction variable.')
+ else:
+ if is_String(spawn):
+ spawn = env.subst(spawn, raw=1, conv=lambda x: x)
+
+ escape = env.get('ESCAPE', lambda x: x)
+
+ ENV = get_default_ENV(env)
+
+ # Ensure that the ENV values are all strings:
+ for key, value in ENV.items():
+ if not is_String(value):
+ if is_List(value):
+ # If the value is a list, then we assume it is a
+ # path list, because that's a pretty common list-like
+ # value to stick in an environment variable:
+ value = flatten_sequence(value)
+ ENV[key] = string.join(map(str, value), os.pathsep)
+ else:
+ # If it isn't a string or a list, then we just coerce
+ # it to a string, which is the proper way to handle
+ # Dir and File instances and will produce something
+ # reasonable for just about everything else:
+ ENV[key] = str(value)
+
+ cmd_list, ignore, silent = self.process(target, map(rfile, source), env)
+
+ # Use len() to filter out any "command" that's zero-length.
+ for cmd_line in filter(len, cmd_list):
+ # Escape the command line for the interpreter we are using.
+ cmd_line = escape_list(cmd_line, escape)
+ result = spawn(shell, escape, cmd_line[0], cmd_line, ENV)
+ if not ignore and result:
+ msg = "Error %s" % result
+ return SCons.Errors.BuildError(errstr=msg,
+ status=result,
+ action=self,
+ command=cmd_line)
+ return 0
+
+ def get_presig(self, target, source, env):
+ """Return the signature contents of this action's command line.
+
+ This strips $( and $), and everything in between, from the string,
+ since those parts don't affect signatures.
+ """
+ from SCons.Subst import SUBST_SIG
+ cmd = self.cmd_list
+ if is_List(cmd):
+ cmd = string.join(map(str, cmd))
+ else:
+ cmd = str(cmd)
+ return env.subst_target_source(cmd, SUBST_SIG, target, source)
+
+ def get_implicit_deps(self, target, source, env):
+ icd = env.get('IMPLICIT_COMMAND_DEPENDENCIES', True)
+ if is_String(icd) and icd[:1] == '$':
+ icd = env.subst(icd)
+ if not icd or icd in ('0', 'None'):
+ return []
+ from SCons.Subst import SUBST_SIG
+ cmd_list = env.subst_list(self.cmd_list, SUBST_SIG, target, source)
+ res = []
+ for cmd_line in cmd_list:
+ if cmd_line:
+ d = env.WhereIs(str(cmd_line[0]))
+ if d:
+ res.append(env.fs.File(d))
+ return res
+
+class CommandGeneratorAction(ActionBase):
+ """Class for command-generator actions."""
+ def __init__(self, generator, kw):
+ if __debug__: logInstanceCreation(self, 'Action.CommandGeneratorAction')
+ self.generator = generator
+ self.gen_kw = kw
+ self.varlist = kw.get('varlist', ())
+
+ def _generate(self, target, source, env, for_signature):
+ # ensure that target is a list, to make it easier to write
+ # generator functions:
+ if not is_List(target):
+ target = [target]
+
+ ret = self.generator(target=target, source=source, env=env, for_signature=for_signature)
+ #TODO(1.5) gen_cmd = Action(ret, **self.gen_kw)
+ gen_cmd = apply(Action, (ret,), self.gen_kw)
+ if not gen_cmd:
+ raise SCons.Errors.UserError("Object returned from command generator: %s cannot be used to create an Action." % repr(ret))
+ return gen_cmd
+
+ def __str__(self):
+ try:
+ env = self.presub_env
+ except AttributeError:
+ env = None
+ if env is None:
+ env = SCons.Defaults.DefaultEnvironment()
+ act = self._generate([], [], env, 1)
+ return str(act)
+
+ def genstring(self, target, source, env):
+ return self._generate(target, source, env, 1).genstring(target, source, env)
+
+ def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
+ show=_null, execute=_null, chdir=_null):
+ act = self._generate(target, source, env, 0)
+ return act(target, source, env, exitstatfunc, presub,
+ show, execute, chdir)
+
+ def get_presig(self, target, source, env):
+ """Return the signature contents of this action's command line.
+
+ This strips $( and $), and everything in between, from the string,
+ since those parts don't affect signatures.
+ """
+ return self._generate(target, source, env, 1).get_presig(target, source, env)
+
+ def get_implicit_deps(self, target, source, env):
+ return self._generate(target, source, env, 1).get_implicit_deps(target, source, env)
+
+
+
+# A LazyAction is a kind of hybrid generator and command action for
+# strings of the form "$VAR". These strings normally expand to other
+# strings (think "$CCCOM" to "$CC -c -o $TARGET $SOURCE"), but we also
+# want to be able to replace them with functions in the construction
+# environment. Consequently, we want lazy evaluation and creation of
+# an Action in the case of the function, but that's overkill in the more
+# normal case of expansion to other strings.
+#
+# So we do this with a subclass that's both a generator *and*
+# a command action. The overridden methods all do a quick check
+# of the construction variable, and if it's a string we just call
+# the corresponding CommandAction method to do the heavy lifting.
+# If not, then we call the same-named CommandGeneratorAction method.
+# The CommandGeneratorAction methods work by using the overridden
+# _generate() method, that is, our own way of handling "generation" of
+# an action based on what's in the construction variable.
+
+class LazyAction(CommandGeneratorAction, CommandAction):
+
+ def __init__(self, var, kw):
+ if __debug__: logInstanceCreation(self, 'Action.LazyAction')
+ #FUTURE CommandAction.__init__(self, '${'+var+'}', **kw)
+ apply(CommandAction.__init__, (self, '${'+var+'}'), kw)
+ self.var = SCons.Util.to_String(var)
+ self.gen_kw = kw
+
+ def get_parent_class(self, env):
+ c = env.get(self.var)
+ if is_String(c) and not '\n' in c:
+ return CommandAction
+ return CommandGeneratorAction
+
+ def _generate_cache(self, env):
+ c = env.get(self.var, '')
+ #TODO(1.5) gen_cmd = Action(c, **self.gen_kw)
+ gen_cmd = apply(Action, (c,), self.gen_kw)
+ if not gen_cmd:
+ raise SCons.Errors.UserError("$%s value %s cannot be used to create an Action." % (self.var, repr(c)))
+ return gen_cmd
+
+ def _generate(self, target, source, env, for_signature):
+ return self._generate_cache(env)
+
+ def __call__(self, target, source, env, *args, **kw):
+ args = (self, target, source, env) + args
+ c = self.get_parent_class(env)
+ #TODO(1.5) return c.__call__(*args, **kw)
+ return apply(c.__call__, args, kw)
+
+ def get_presig(self, target, source, env):
+ c = self.get_parent_class(env)
+ return c.get_presig(self, target, source, env)
+
+
+
+class FunctionAction(_ActionAction):
+ """Class for Python function actions."""
+
+ def __init__(self, execfunction, kw):
+ if __debug__: logInstanceCreation(self, 'Action.FunctionAction')
+
+ self.execfunction = execfunction
+ try:
+ self.funccontents = _callable_contents(execfunction)
+ except AttributeError:
+ try:
+ # See if execfunction will do the heavy lifting for us.
+ self.gc = execfunction.get_contents
+ except AttributeError:
+ # This is weird, just do the best we can.
+ self.funccontents = _object_contents(execfunction)
+
+ #TODO(1.5) _ActionAction.__init__(self, **kw)
+ apply(_ActionAction.__init__, (self,), kw)
+
+ def function_name(self):
+ try:
+ return self.execfunction.__name__
+ except AttributeError:
+ try:
+ return self.execfunction.__class__.__name__
+ except AttributeError:
+ return "unknown_python_function"
+
+ def strfunction(self, target, source, env):
+ if self.cmdstr is None:
+ return None
+ if self.cmdstr is not _null:
+ from SCons.Subst import SUBST_RAW
+ c = env.subst(self.cmdstr, SUBST_RAW, target, source)
+ if c:
+ return c
+ def array(a):
+ def quote(s):
+ try:
+ str_for_display = s.str_for_display
+ except AttributeError:
+ s = repr(s)
+ else:
+ s = str_for_display()
+ return s
+ return '[' + string.join(map(quote, a), ", ") + ']'
+ try:
+ strfunc = self.execfunction.strfunction
+ except AttributeError:
+ pass
+ else:
+ if strfunc is None:
+ return None
+ if callable(strfunc):
+ return strfunc(target, source, env)
+ name = self.function_name()
+ tstr = array(target)
+ sstr = array(source)
+ return "%s(%s, %s)" % (name, tstr, sstr)
+
+ def __str__(self):
+ name = self.function_name()
+ if name == 'ActionCaller':
+ return str(self.execfunction)
+ return "%s(target, source, env)" % name
+
+ def execute(self, target, source, env):
+ exc_info = (None,None,None)
+ try:
+ rsources = map(rfile, source)
+ try:
+ result = self.execfunction(target=target, source=rsources, env=env)
+ except KeyboardInterrupt, e:
+ raise
+ except SystemExit, e:
+ raise
+ except Exception, e:
+ result = e
+ exc_info = sys.exc_info()
+
+ if result:
+ result = SCons.Errors.convert_to_BuildError(result, exc_info)
+ result.node=target
+ result.action=self
+ result.command=self.strfunction(target, source, env)
+
+ # FIXME: This maintains backward compatibility with respect to
+ # which type of exceptions were returned by raising an
+ # exception and which ones were returned by value. It would
+ # probably be best to always return them by value here, but
+ # some codes do not check the return value of Actions and I do
+ # not have the time to modify them at this point.
+ if (exc_info[1] and
+ not isinstance(exc_info[1],EnvironmentError)):
+ raise result
+
+ return result
+ finally:
+ # Break the cycle between the traceback object and this
+ # function stack frame. See the sys.exc_info() doc info for
+ # more information about this issue.
+ del exc_info
+
+
+ def get_presig(self, target, source, env):
+ """Return the signature contents of this callable action."""
+ try:
+ return self.gc(target, source, env)
+ except AttributeError:
+ return self.funccontents
+
+ def get_implicit_deps(self, target, source, env):
+ return []
+
+class ListAction(ActionBase):
+ """Class for lists of other actions."""
+ def __init__(self, list):
+ if __debug__: logInstanceCreation(self, 'Action.ListAction')
+ def list_of_actions(x):
+ if isinstance(x, ActionBase):
+ return x
+ return Action(x)
+ self.list = map(list_of_actions, list)
+ # our children will have had any varlist
+ # applied; we don't need to do it again
+ self.varlist = ()
+
+ def genstring(self, target, source, env):
+ return string.join(map(lambda a, t=target, s=source, e=env:
+ a.genstring(t, s, e),
+ self.list),
+ '\n')
+
+ def __str__(self):
+ return string.join(map(str, self.list), '\n')
+
+ def presub_lines(self, env):
+ return SCons.Util.flatten_sequence(
+ map(lambda a, env=env: a.presub_lines(env), self.list))
+
+ def get_presig(self, target, source, env):
+ """Return the signature contents of this action list.
+
+ Simple concatenation of the signatures of the elements.
+ """
+ return string.join(map(lambda x, t=target, s=source, e=env:
+ x.get_contents(t, s, e),
+ self.list),
+ "")
+
+ def __call__(self, target, source, env, exitstatfunc=_null, presub=_null,
+ show=_null, execute=_null, chdir=_null):
+ for act in self.list:
+ stat = act(target, source, env, exitstatfunc, presub,
+ show, execute, chdir)
+ if stat:
+ return stat
+ return 0
+
+ def get_implicit_deps(self, target, source, env):
+ result = []
+ for act in self.list:
+ result.extend(act.get_implicit_deps(target, source, env))
+ return result
+
+class ActionCaller:
+ """A class for delaying calling an Action function with specific
+ (positional and keyword) arguments until the Action is actually
+ executed.
+
+ This class looks to the rest of the world like a normal Action object,
+ but what it's really doing is hanging on to the arguments until we
+ have a target, source and env to use for the expansion.
+ """
+ def __init__(self, parent, args, kw):
+ self.parent = parent
+ self.args = args
+ self.kw = kw
+
+ def get_contents(self, target, source, env):
+ actfunc = self.parent.actfunc
+ try:
+ # "self.actfunc" is a function.
+ contents = str(actfunc.func_code.co_code)
+ except AttributeError:
+ # "self.actfunc" is a callable object.
+ try:
+ contents = str(actfunc.__call__.im_func.func_code.co_code)
+ except AttributeError:
+ # No __call__() method, so it might be a builtin
+ # or something like that. Do the best we can.
+ contents = str(actfunc)
+ contents = remove_set_lineno_codes(contents)
+ return contents
+
+ def subst(self, s, target, source, env):
+ # If s is a list, recursively apply subst()
+ # to every element in the list
+ if is_List(s):
+ result = []
+ for elem in s:
+ result.append(self.subst(elem, target, source, env))
+ return self.parent.convert(result)
+
+ # Special-case hack: Let a custom function wrapped in an
+ # ActionCaller get at the environment through which the action
+ # was called by using this hard-coded value as a special return.
+ if s == '$__env__':
+ return env
+ elif is_String(s):
+ return env.subst(s, 1, target, source)
+ return self.parent.convert(s)
+
+ def subst_args(self, target, source, env):
+ return map(lambda x, self=self, t=target, s=source, e=env:
+ self.subst(x, t, s, e),
+ self.args)
+
+ def subst_kw(self, target, source, env):
+ kw = {}
+ for key in self.kw.keys():
+ kw[key] = self.subst(self.kw[key], target, source, env)
+ return kw
+
+ def __call__(self, target, source, env):
+ args = self.subst_args(target, source, env)
+ kw = self.subst_kw(target, source, env)
+ #TODO(1.5) return self.parent.actfunc(*args, **kw)
+ return apply(self.parent.actfunc, args, kw)
+
+ def strfunction(self, target, source, env):
+ args = self.subst_args(target, source, env)
+ kw = self.subst_kw(target, source, env)
+ #TODO(1.5) return self.parent.strfunc(*args, **kw)
+ return apply(self.parent.strfunc, args, kw)
+
+ def __str__(self):
+ #TODO(1.5) return self.parent.strfunc(*self.args, **self.kw)
+ return apply(self.parent.strfunc, self.args, self.kw)
+
+class ActionFactory:
+ """A factory class that will wrap up an arbitrary function
+ as an SCons-executable Action object.
+
+ The real heavy lifting here is done by the ActionCaller class.
+ We just collect the (positional and keyword) arguments that we're
+ called with and give them to the ActionCaller object we create,
+ so it can hang onto them until it needs them.
+ """
+ def __init__(self, actfunc, strfunc, convert=lambda x: x):
+ self.actfunc = actfunc
+ self.strfunc = strfunc
+ self.convert = convert
+
+ def __call__(self, *args, **kw):
+ ac = ActionCaller(self, args, kw)
+ action = Action(ac, strfunction=ac.strfunction)
+ return action
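+
+# Illustrative usage sketch, not part of the original SCons file:
+# SCons.Defaults wraps small Python functions this way (Copy, Delete,
+# Mkdir and friends).  A hypothetical "touch"-style factory -- every
+# name below is an example only -- might look like:
+#
+#     import os
+#
+#     def touch_func(path):
+#         open(path, 'a').close()
+#         os.utime(path, None)
+#
+#     Touch = ActionFactory(touch_func,
+#                           lambda path: 'Touch("%s")' % path)
+#
+# Calling Touch("$TARGET") returns an Action wrapping an ActionCaller,
+# so "$TARGET" is only substituted when the action finally runs with a
+# real target/source/env.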
diff --git a/tools/scons/scons-local-1.2.0/SCons/Builder.py b/tools/scons/scons-local-1.2.0/SCons/Builder.py
new file mode 100644
index 000000000..97aabb483
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Builder.py
@@ -0,0 +1,844 @@
+"""SCons.Builder
+
+Builder object subsystem.
+
+A Builder object is a callable that encapsulates information about how
+to execute actions to create a target Node (file) from source Nodes
+(files), and how to create those dependencies for tracking.
+
+The main entry point here is the Builder() factory method. This provides
+a procedural interface that creates the right underlying Builder object
+based on the keyword arguments supplied and the types of the arguments.
+
+The goal is for this external interface to be simple enough that the
+vast majority of users can create new Builders as necessary to support
+building new types of files in their configurations, without having to
+dive any deeper into this subsystem.
+
+The base class here is BuilderBase. This is a concrete base class which
+does, in fact, represent the Builder objects that we (or users) create.
+
+There is also a proxy that looks like a Builder:
+
+ CompositeBuilder
+
+ This proxies for a Builder with an action that is actually a
+ dictionary that knows how to map file suffixes to a specific
+ action. This is so that we can invoke different actions
+ (compilers, compile options) for different flavors of source
+ files.
+
+Builders and their proxies have the following public interface methods
+used by other modules:
+
+ __call__()
+ THE public interface. Calling a Builder object (with the
+ use of internal helper methods) sets up the target and source
+ dependencies, appropriate mapping to a specific action, and the
+ environment manipulation necessary for overridden construction
+ variables. This also takes care of warning about possible mistakes
+ in keyword arguments.
+
+ add_emitter()
+ Adds an emitter for a specific file suffix, used by some Tool
+ modules to specify that (for example) a yacc invocation on a .y
+ can create a .h *and* a .c file.
+
+ add_action()
+ Adds an action for a specific file suffix, heavily used by
+ Tool modules to add their specific action(s) for turning
+ a source file into an object file to the global static
+ and shared object file Builders.
+
+There are the following methods for internal use within this module:
+
+ _execute()
+ The internal method that handles the heavy lifting when a
+ Builder is called. This is used so that the __call__() methods
+ can set up warnings about possible mistakes in keyword-argument
+ overrides, and *then* execute all of the steps necessary so that
+ the warnings only occur once.
+
+ get_name()
+ Returns the Builder's name within a specific Environment,
+ primarily used to try to return helpful information in error
+ messages.
+
+ adjust_suffix()
+ get_prefix()
+ get_suffix()
+ get_src_suffix()
+ set_src_suffix()
+ Miscellaneous stuff for handling the prefix and suffix
+ manipulation we use in turning source file names into target
+ file names.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Builder.py 3842 2008/12/20 22:59:52 scons"
+
+import UserDict
+import UserList
+
+import SCons.Action
+from SCons.Debug import logInstanceCreation
+from SCons.Errors import InternalError, UserError
+import SCons.Executor
+import SCons.Memoize
+import SCons.Node
+import SCons.Node.FS
+import SCons.Util
+import SCons.Warnings
+
+class _Null:
+ pass
+
+_null = _Null
+
+class DictCmdGenerator(SCons.Util.Selector):
+ """This is a callable class that can be used as a
+ command generator function. It holds on to a dictionary
+ mapping file suffixes to Actions. It uses that dictionary
+ to return the proper action based on the file suffix of
+ the source file."""
+
+ def __init__(self, dict=None, source_ext_match=1):
+ SCons.Util.Selector.__init__(self, dict)
+ self.source_ext_match = source_ext_match
+
+ def src_suffixes(self):
+ return self.keys()
+
+ def add_action(self, suffix, action):
+ """Add a suffix-action pair to the mapping.
+ """
+ self[suffix] = action
+
+ def __call__(self, target, source, env, for_signature):
+ if not source:
+ return []
+
+ if self.source_ext_match:
+ ext = None
+ for src in map(str, source):
+ my_ext = SCons.Util.splitext(src)[1]
+ if ext and my_ext != ext:
+ raise UserError("While building `%s' from `%s': Cannot build multiple sources with different extensions: %s, %s" % (repr(map(str, target)), src, ext, my_ext))
+ ext = my_ext
+ else:
+ ext = SCons.Util.splitext(str(source[0]))[1]
+
+ if not ext:
+ raise UserError("While building `%s': Cannot deduce file extension from source files: %s" % (repr(map(str, target)), repr(map(str, source))))
+
+ try:
+ ret = SCons.Util.Selector.__call__(self, env, source)
+ except KeyError, e:
+ raise UserError("Ambiguous suffixes after environment substitution: %s == %s == %s" % (e[0], e[1], e[2]))
+ if ret is None:
+ raise UserError("While building `%s' from `%s': Don't know how to build from a source file with suffix `%s'. Expected a suffix in this list: %s." % \
+ (repr(map(str, target)), repr(map(str, source)), ext, repr(self.keys())))
+ return ret
+
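+# Illustrative usage sketch, not part of the original SCons file: a
+# suffix->action dictionary handed to the Builder() factory below ends
+# up driving a DictCmdGenerator (the command strings are examples only):
+#
+#     bld = Builder(action = {'.c'   : '$CC  -c -o $TARGET $SOURCES',
+#                             '.cpp' : '$CXX -c -o $TARGET $SOURCES'},
+#                   suffix = '.o')
+#
+# At build time the generator selects the entry matching the source
+# suffix and raises UserError for mixed or unknown suffixes, as above.
+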
+class CallableSelector(SCons.Util.Selector):
+ """A callable dictionary that will, in turn, call the value it
+ finds if it can."""
+ def __call__(self, env, source):
+ value = SCons.Util.Selector.__call__(self, env, source)
+ if callable(value):
+ value = value(env, source)
+ return value
+
+class DictEmitter(SCons.Util.Selector):
+ """A callable dictionary that maps file suffixes to emitters.
+ When called, it finds the right emitter in its dictionary for the
+ suffix of the first source file, and calls that emitter to get the
+ right lists of targets and sources to return. If there's no emitter
+ for the suffix in its dictionary, the original target and source are
+ returned.
+ """
+ def __call__(self, target, source, env):
+ emitter = SCons.Util.Selector.__call__(self, env, source)
+ if emitter:
+ target, source = emitter(target, source, env)
+ return (target, source)
+
+class ListEmitter(UserList.UserList):
+ """A callable list of emitters that calls each in sequence,
+ returning the result.
+ """
+ def __call__(self, target, source, env):
+ for e in self.data:
+ target, source = e(target, source, env)
+ return (target, source)
+
+# These are common errors when calling a Builder;
+# they are similar to the 'target' and 'source' keyword args to builders,
+# so we issue warnings when we see them. The warnings can, of course,
+# be disabled.
+misleading_keywords = {
+ 'targets' : 'target',
+ 'sources' : 'source',
+}
+
+class OverrideWarner(UserDict.UserDict):
+ """A class for warning about keyword arguments that we use as
+ overrides in a Builder call.
+
+ This class exists to handle the fact that a single Builder call
+ can actually invoke multiple builders. This class only emits the
+ warnings once, no matter how many Builders are invoked.
+ """
+ def __init__(self, dict):
+ UserDict.UserDict.__init__(self, dict)
+ if __debug__: logInstanceCreation(self, 'Builder.OverrideWarner')
+ self.already_warned = None
+ def warn(self):
+ if self.already_warned:
+ return
+ for k in self.keys():
+ if misleading_keywords.has_key(k):
+ alt = misleading_keywords[k]
+ msg = "Did you mean to use `%s' instead of `%s'?" % (alt, k)
+ SCons.Warnings.warn(SCons.Warnings.MisleadingKeywordsWarning, msg)
+ self.already_warned = 1
+
+def Builder(**kw):
+ """A factory for builder objects."""
+ composite = None
+ if kw.has_key('generator'):
+ if kw.has_key('action'):
+ raise UserError, "You must not specify both an action and a generator."
+ kw['action'] = SCons.Action.CommandGeneratorAction(kw['generator'], {})
+ del kw['generator']
+ elif kw.has_key('action'):
+ source_ext_match = kw.get('source_ext_match', 1)
+ if kw.has_key('source_ext_match'):
+ del kw['source_ext_match']
+ if SCons.Util.is_Dict(kw['action']):
+ composite = DictCmdGenerator(kw['action'], source_ext_match)
+ kw['action'] = SCons.Action.CommandGeneratorAction(composite, {})
+ kw['src_suffix'] = composite.src_suffixes()
+ else:
+ kw['action'] = SCons.Action.Action(kw['action'])
+
+ if kw.has_key('emitter'):
+ emitter = kw['emitter']
+ if SCons.Util.is_String(emitter):
+ # This allows users to pass in an Environment
+ # variable reference (like "$FOO") as an emitter.
+ # We will look in that Environment variable for
+ # a callable to use as the actual emitter.
+ var = SCons.Util.get_environment_var(emitter)
+ if not var:
+ raise UserError, "Supplied emitter '%s' does not appear to refer to an Environment variable" % emitter
+ kw['emitter'] = EmitterProxy(var)
+ elif SCons.Util.is_Dict(emitter):
+ kw['emitter'] = DictEmitter(emitter)
+ elif SCons.Util.is_List(emitter):
+ kw['emitter'] = ListEmitter(emitter)
+
+ result = apply(BuilderBase, (), kw)
+
+ if not composite is None:
+ result = CompositeBuilder(result, composite)
+
+ return result
+
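+# Illustrative usage sketch, not part of the original SCons file:
+# typical keyword combinations accepted by the factory above (the
+# commands and names are examples only):
+#
+#     b1 = Builder(action = 'gzip -c < $SOURCE > $TARGET',
+#                  suffix = '.gz', src_suffix = '.txt')
+#
+#     def gen(source, target, env, for_signature):
+#         return 'cp %s %s' % (source[0], target[0])
+#     b2 = Builder(generator = gen)
+#
+# Supplying both 'action' and 'generator' raises UserError; a dictionary
+# action yields the CompositeBuilder wrapper returned above.
+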
+def _node_errors(builder, env, tlist, slist):
+ """Validate that the lists of target and source nodes are
+ legal for this builder and environment. Raise errors or
+ issue warnings as appropriate.
+ """
+
+ # First, figure out if there are any errors in the way the targets
+ # were specified.
+ for t in tlist:
+ if t.side_effect:
+ raise UserError, "Multiple ways to build the same target were specified for: %s" % t
+ if t.has_explicit_builder():
+ if not t.env is None and not t.env is env:
+ action = t.builder.action
+ t_contents = action.get_contents(tlist, slist, t.env)
+ contents = action.get_contents(tlist, slist, env)
+
+ if t_contents == contents:
+ msg = "Two different environments were specified for target %s,\n\tbut they appear to have the same action: %s" % (t, action.genstring(tlist, slist, t.env))
+ SCons.Warnings.warn(SCons.Warnings.DuplicateEnvironmentWarning, msg)
+ else:
+ msg = "Two environments with different actions were specified for the same target: %s" % t
+ raise UserError, msg
+ if builder.multi:
+ if t.builder != builder:
+ msg = "Two different builders (%s and %s) were specified for the same target: %s" % (t.builder.get_name(env), builder.get_name(env), t)
+ raise UserError, msg
+ if t.get_executor().targets != tlist:
+ msg = "Two different target lists have a target in common: %s (from %s and from %s)" % (t, map(str, t.get_executor().targets), map(str, tlist))
+ raise UserError, msg
+ elif t.sources != slist:
+ msg = "Multiple ways to build the same target were specified for: %s (from %s and from %s)" % (t, map(str, t.sources), map(str, slist))
+ raise UserError, msg
+
+ if builder.single_source:
+ if len(slist) > 1:
+ raise UserError, "More than one source given for single-source builder: targets=%s sources=%s" % (map(str,tlist), map(str,slist))
+
+class EmitterProxy:
+ """This is a callable class that can act as a
+ Builder emitter. It holds on to a string that
+ is a key into an Environment dictionary, and will
+ look there at actual build time to see if it holds
+ a callable. If so, we will call that as the actual
+ emitter."""
+ def __init__(self, var):
+ self.var = SCons.Util.to_String(var)
+
+ def __call__(self, target, source, env):
+ emitter = self.var
+
+ # Recursively substitute the variable.
+ # We can't use env.subst() because it deals only
+ # in strings. Maybe we should change that?
+ while SCons.Util.is_String(emitter) and env.has_key(emitter):
+ emitter = env[emitter]
+ if callable(emitter):
+ target, source = emitter(target, source, env)
+ elif SCons.Util.is_List(emitter):
+ for e in emitter:
+ target, source = e(target, source, env)
+
+ return (target, source)
+
+
+ def __cmp__(self, other):
+ return cmp(self.var, other.var)
+
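+# Illustrative usage sketch, not part of the original SCons file: the
+# string-emitter form accepted by Builder() resolves through
+# EmitterProxy at build time (the variable and function names are
+# examples only):
+#
+#     def my_emitter(target, source, env):
+#         return (target + ['extra.out'], source)
+#
+#     env['MY_EMITTER'] = my_emitter
+#     bld = Builder(action = '...', emitter = '$MY_EMITTER')
+#
+# EmitterProxy looks the variable up in the construction environment
+# and calls whatever callable (or list of callables) it finds there.
+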
+class BuilderBase:
+ """Base class for Builders, objects that create output
+ nodes (files) from input nodes (files).
+ """
+
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ memoizer_counters = []
+
+ def __init__(self, action = None,
+ prefix = '',
+ suffix = '',
+ src_suffix = '',
+ target_factory = None,
+ source_factory = None,
+ target_scanner = None,
+ source_scanner = None,
+ emitter = None,
+ multi = 0,
+ env = None,
+ single_source = 0,
+ name = None,
+ chdir = _null,
+ is_explicit = 1,
+ src_builder = None,
+ ensure_suffix = False,
+ **overrides):
+ if __debug__: logInstanceCreation(self, 'Builder.BuilderBase')
+ self._memo = {}
+ self.action = action
+ self.multi = multi
+ if SCons.Util.is_Dict(prefix):
+ prefix = CallableSelector(prefix)
+ self.prefix = prefix
+ if SCons.Util.is_Dict(suffix):
+ suffix = CallableSelector(suffix)
+ self.env = env
+ self.single_source = single_source
+ if overrides.has_key('overrides'):
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning,
+ "The \"overrides\" keyword to Builder() creation has been deprecated;\n" +\
+ "\tspecify the items as keyword arguments to the Builder() call instead.")
+ overrides.update(overrides['overrides'])
+ del overrides['overrides']
+ if overrides.has_key('scanner'):
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning,
+ "The \"scanner\" keyword to Builder() creation has been deprecated;\n"
+ "\tuse: source_scanner or target_scanner as appropriate.")
+ del overrides['scanner']
+ self.overrides = overrides
+
+ self.set_suffix(suffix)
+ self.set_src_suffix(src_suffix)
+ self.ensure_suffix = ensure_suffix
+
+ self.target_factory = target_factory
+ self.source_factory = source_factory
+ self.target_scanner = target_scanner
+ self.source_scanner = source_scanner
+
+ self.emitter = emitter
+
+ # Optional Builder name should only be used for Builders
+ # that don't get attached to construction environments.
+ if name:
+ self.name = name
+ self.executor_kw = {}
+ if not chdir is _null:
+ self.executor_kw['chdir'] = chdir
+ self.is_explicit = is_explicit
+
+ if src_builder is None:
+ src_builder = []
+ elif not SCons.Util.is_List(src_builder):
+ src_builder = [ src_builder ]
+ self.src_builder = src_builder
+
+ def __nonzero__(self):
+ raise InternalError, "Do not test for the Node.builder attribute directly; use Node.has_builder() instead"
+
+ def get_name(self, env):
+ """Attempts to get the name of the Builder.
+
+ Look at the BUILDERS variable of env, expecting it to be a
+ dictionary containing this Builder, and return the key of the
+ dictionary. If there's no key, then return a directly-configured
+ name (if there is one) or the name of the class (by default)."""
+
+ try:
+ index = env['BUILDERS'].values().index(self)
+ return env['BUILDERS'].keys()[index]
+ except (AttributeError, KeyError, TypeError, ValueError):
+ try:
+ return self.name
+ except AttributeError:
+ return str(self.__class__)
+
+ def __cmp__(self, other):
+ return cmp(self.__dict__, other.__dict__)
+
+ def splitext(self, path, env=None):
+ if not env:
+ env = self.env
+ if env:
+ matchsuf = filter(lambda S,path=path: path[-len(S):] == S,
+ self.src_suffixes(env))
+ if matchsuf:
+ suf = max(map(None, map(len, matchsuf), matchsuf))[1]
+ return [path[:-len(suf)], path[-len(suf):]]
+ return SCons.Util.splitext(path)
+
+ def get_single_executor(self, env, tlist, slist, executor_kw):
+ if not self.action:
+ raise UserError, "Builder %s must have an action to build %s."%(self.get_name(env or self.env), map(str,tlist))
+ return self.action.get_executor(env or self.env,
+ [], # env already has overrides
+ tlist,
+ slist,
+ executor_kw)
+
+ def get_multi_executor(self, env, tlist, slist, executor_kw):
+ try:
+ executor = tlist[0].get_executor(create = 0)
+ except (AttributeError, IndexError):
+ return self.get_single_executor(env, tlist, slist, executor_kw)
+ else:
+ executor.add_sources(slist)
+ return executor
+
+ def _adjustixes(self, files, pre, suf, ensure_suffix=False):
+ if not files:
+ return []
+ result = []
+ if not SCons.Util.is_List(files):
+ files = [files]
+
+ for f in files:
+ if SCons.Util.is_String(f):
+ f = SCons.Util.adjustixes(f, pre, suf, ensure_suffix)
+ result.append(f)
+ return result
+
+ def _create_nodes(self, env, target = None, source = None):
+ """Create and return lists of target and source nodes.
+ """
+ src_suf = self.get_src_suffix(env)
+
+ target_factory = env.get_factory(self.target_factory)
+ source_factory = env.get_factory(self.source_factory)
+
+ source = self._adjustixes(source, None, src_suf)
+ slist = env.arg2nodes(source, source_factory)
+
+ pre = self.get_prefix(env, slist)
+ suf = self.get_suffix(env, slist)
+
+ if target is None:
+ try:
+ t_from_s = slist[0].target_from_source
+ except AttributeError:
+ raise UserError("Do not know how to create a target from source `%s'" % slist[0])
+ except IndexError:
+ tlist = []
+ else:
+ splitext = lambda S,self=self,env=env: self.splitext(S,env)
+ tlist = [ t_from_s(pre, suf, splitext) ]
+ else:
+ target = self._adjustixes(target, pre, suf, self.ensure_suffix)
+ tlist = env.arg2nodes(target, target_factory, target=target, source=source)
+
+ if self.emitter:
+ # The emitter is going to do str(node), but because we're
+ # being called *from* a builder invocation, the new targets
+ # don't yet have a builder set on them and will look like
+ # source files. Fool the emitter's str() calls by setting
+ # up a temporary builder on the new targets.
+ new_targets = []
+ for t in tlist:
+ if not t.is_derived():
+ t.builder_set(self)
+ new_targets.append(t)
+
+ orig_tlist = tlist[:]
+ orig_slist = slist[:]
+
+ target, source = self.emitter(target=tlist, source=slist, env=env)
+
+ # Now delete the temporary builders that we attached to any
+ # new targets, so that _node_errors() doesn't do weird stuff
+ # to them because it thinks they already have builders.
+ for t in new_targets:
+ if t.builder is self:
+ # Only delete the temporary builder if the emitter
+ # didn't change it on us.
+ t.builder_set(None)
+
+ # Have to call arg2nodes yet again, since it is legal for
+ # emitters to spit out strings as well as Node instances.
+ tlist = env.arg2nodes(target, target_factory,
+ target=orig_tlist, source=orig_slist)
+ slist = env.arg2nodes(source, source_factory,
+ target=orig_tlist, source=orig_slist)
+
+ return tlist, slist
+
+ def _execute(self, env, target, source, overwarn={}, executor_kw={}):
+ # We now assume that target and source are lists or None.
+ if self.src_builder:
+ source = self.src_builder_sources(env, source, overwarn)
+
+ if self.single_source and len(source) > 1 and target is None:
+ result = []
+ if target is None: target = [None]*len(source)
+ for tgt, src in zip(target, source):
+ if not tgt is None: tgt = [tgt]
+ if not src is None: src = [src]
+ result.extend(self._execute(env, tgt, src, overwarn))
+ return SCons.Node.NodeList(result)
+
+ overwarn.warn()
+
+ tlist, slist = self._create_nodes(env, target, source)
+
+ # Check for errors with the specified target/source lists.
+ _node_errors(self, env, tlist, slist)
+
+ # The targets are fine, so find or make the appropriate Executor to
+ # build this particular list of targets from this particular list of
+ # sources.
+ if self.multi:
+ get_executor = self.get_multi_executor
+ else:
+ get_executor = self.get_single_executor
+ executor = get_executor(env, tlist, slist, executor_kw)
+
+ # Now set up the relevant information in the target Nodes themselves.
+ for t in tlist:
+ t.cwd = env.fs.getcwd()
+ t.builder_set(self)
+ t.env_set(env)
+ t.add_source(slist)
+ t.set_executor(executor)
+ t.set_explicit(self.is_explicit)
+
+ return SCons.Node.NodeList(tlist)
+
+ def __call__(self, env, target=None, source=None, chdir=_null, **kw):
+ # We now assume that target and source are lists or None.
+ # The caller (typically Environment.BuilderWrapper) is
+ # responsible for converting any scalar values to lists.
+ if chdir is _null:
+ ekw = self.executor_kw
+ else:
+ ekw = self.executor_kw.copy()
+ ekw['chdir'] = chdir
+ if kw:
+ if kw.has_key('srcdir'):
+ def prependDirIfRelative(f, srcdir=kw['srcdir']):
+ import os.path
+ if SCons.Util.is_String(f) and not os.path.isabs(f):
+ f = os.path.join(srcdir, f)
+ return f
+ if not SCons.Util.is_List(source):
+ source = [source]
+ source = map(prependDirIfRelative, source)
+ del kw['srcdir']
+ if self.overrides:
+ env_kw = self.overrides.copy()
+ env_kw.update(kw)
+ else:
+ env_kw = kw
+ else:
+ env_kw = self.overrides
+ env = env.Override(env_kw)
+ return self._execute(env, target, source, OverrideWarner(kw), ekw)
+
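+ # Illustrative usage sketch, not part of the original SCons file:
+ # when a builder attached to an environment is invoked from an
+ # SConscript, extra keyword arguments become per-call
+ # construction-variable overrides and 'srcdir' re-roots relative
+ # source strings (the names below are examples only):
+ #
+ #     env.Program('tool', ['main.c', 'util.c'], CPPDEFINES = ['DEBUG'])
+ #     env.MyBuilder('out', ['a.in', 'b.in'], srcdir = 'generated')
+ #
+ # The keywords are merged with self.overrides and applied through
+ # env.Override() for just this one call.
+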
+ def adjust_suffix(self, suff):
+ if suff and not suff[0] in [ '.', '_', '$' ]:
+ return '.' + suff
+ return suff
+
+ def get_prefix(self, env, sources=[]):
+ prefix = self.prefix
+ if callable(prefix):
+ prefix = prefix(env, sources)
+ return env.subst(prefix)
+
+ def set_suffix(self, suffix):
+ if not callable(suffix):
+ suffix = self.adjust_suffix(suffix)
+ self.suffix = suffix
+
+ def get_suffix(self, env, sources=[]):
+ suffix = self.suffix
+ if callable(suffix):
+ suffix = suffix(env, sources)
+ return env.subst(suffix)
+
+ def set_src_suffix(self, src_suffix):
+ if not src_suffix:
+ src_suffix = []
+ elif not SCons.Util.is_List(src_suffix):
+ src_suffix = [ src_suffix ]
+ adjust = lambda suf, s=self: \
+ callable(suf) and suf or s.adjust_suffix(suf)
+ self.src_suffix = map(adjust, src_suffix)
+
+ def get_src_suffix(self, env):
+ """Get the first src_suffix in the list of src_suffixes."""
+ ret = self.src_suffixes(env)
+ if not ret:
+ return ''
+ return ret[0]
+
+ def add_emitter(self, suffix, emitter):
+ """Add a suffix-emitter mapping to this Builder.
+
+ This assumes that emitter has been initialized with an
+ appropriate dictionary type, and will throw a TypeError if
+ not, so the caller is responsible for knowing that this is an
+ appropriate method to call for the Builder in question.
+ """
+ self.emitter[suffix] = emitter
+
+ def add_src_builder(self, builder):
+ """
+ Add a new Builder to the list of src_builders.
+
+ This requires wiping out cached values so that the computed
+ lists of source suffixes get re-calculated.
+ """
+ self._memo = {}
+ self.src_builder.append(builder)
+
+ def _get_sdict(self, env):
+ """
+ Returns a dictionary mapping all of the source suffixes of all
+ src_builders of this Builder to the underlying Builder that
+ should be called first.
+
+ This dictionary is used for each target specified, so caching it
+ per construction environment would save a lot of extra computation.
+
+ Note that this is re-computed each time, not cached, because there
+ might be changes to one of our source Builders (or one of their
+ source Builders, and so on, and so on...) that we can't "see."
+
+ The underlying methods we call cache their computed values,
+ though, so we hope repeatedly aggregating them into a dictionary
+ like this won't be too big a hit. We may need to look for a
+ better way to do this if performance data show this has turned
+ into a significant bottleneck.
+ """
+ sdict = {}
+ for bld in self.get_src_builders(env):
+ for suf in bld.src_suffixes(env):
+ sdict[suf] = bld
+ return sdict
+
+ def src_builder_sources(self, env, source, overwarn={}):
+ sdict = self._get_sdict(env)
+
+ src_suffixes = self.src_suffixes(env)
+
+ lengths = list(set(map(len, src_suffixes)))
+
+ def match_src_suffix(name, src_suffixes=src_suffixes, lengths=lengths):
+ node_suffixes = map(lambda l, n=name: n[-l:], lengths)
+ for suf in src_suffixes:
+ if suf in node_suffixes:
+ return suf
+ return None
+
+ result = []
+ for s in SCons.Util.flatten(source):
+ if SCons.Util.is_String(s):
+ match_suffix = match_src_suffix(env.subst(s))
+ if not match_suffix and not '.' in s:
+ src_suf = self.get_src_suffix(env)
+ s = self._adjustixes(s, None, src_suf)[0]
+ else:
+ match_suffix = match_src_suffix(s.name)
+ if match_suffix:
+ try:
+ bld = sdict[match_suffix]
+ except KeyError:
+ result.append(s)
+ else:
+ tlist = bld._execute(env, None, [s], overwarn)
+ # If the subsidiary Builder returned more than one
+ # target, then filter out any sources that this
+ # Builder isn't capable of building.
+ if len(tlist) > 1:
+ mss = lambda t, m=match_src_suffix: m(t.name)
+ tlist = filter(mss, tlist)
+ result.extend(tlist)
+ else:
+ result.append(s)
+
+ source_factory = env.get_factory(self.source_factory)
+
+ return env.arg2nodes(result, source_factory)
+
+ def _get_src_builders_key(self, env):
+ return id(env)
+
+ memoizer_counters.append(SCons.Memoize.CountDict('get_src_builders', _get_src_builders_key))
+
+ def get_src_builders(self, env):
+ """
+ Returns the list of source Builders for this Builder.
+
+ This exists mainly to look up Builders referenced as
+ strings in the 'BUILDERS' variable of the construction
+ environment and cache the result.
+ """
+ memo_key = id(env)
+ try:
+ memo_dict = self._memo['get_src_builders']
+ except KeyError:
+ memo_dict = {}
+ self._memo['get_src_builders'] = memo_dict
+ else:
+ try:
+ return memo_dict[memo_key]
+ except KeyError:
+ pass
+
+ builders = []
+ for bld in self.src_builder:
+ if SCons.Util.is_String(bld):
+ try:
+ bld = env['BUILDERS'][bld]
+ except KeyError:
+ continue
+ builders.append(bld)
+
+ memo_dict[memo_key] = builders
+ return builders
+
+ def _subst_src_suffixes_key(self, env):
+ return id(env)
+
+ memoizer_counters.append(SCons.Memoize.CountDict('subst_src_suffixes', _subst_src_suffixes_key))
+
+ def subst_src_suffixes(self, env):
+ """
+ The suffix list may contain construction variable expansions,
+ so we have to evaluate the individual strings. To avoid doing
+ this over and over, we memoize the results for each construction
+ environment.
+ """
+ memo_key = id(env)
+ try:
+ memo_dict = self._memo['subst_src_suffixes']
+ except KeyError:
+ memo_dict = {}
+ self._memo['subst_src_suffixes'] = memo_dict
+ else:
+ try:
+ return memo_dict[memo_key]
+ except KeyError:
+ pass
+ suffixes = map(lambda x, s=self, e=env: e.subst(x), self.src_suffix)
+ memo_dict[memo_key] = suffixes
+ return suffixes
+
+ def src_suffixes(self, env):
+ """
+ Returns the list of source suffixes for all src_builders of this
+ Builder.
+
+ This is essentially a recursive descent of the src_builder "tree."
+ (This value isn't cached because there may be changes in a
+ src_builder many levels deep that we can't see.)
+ """
+ sdict = {}
+ suffixes = self.subst_src_suffixes(env)
+ for s in suffixes:
+ sdict[s] = 1
+ for builder in self.get_src_builders(env):
+ for s in builder.src_suffixes(env):
+ if not sdict.has_key(s):
+ sdict[s] = 1
+ suffixes.append(s)
+ return suffixes
+
+class CompositeBuilder(SCons.Util.Proxy):
+ """A Builder Proxy whose main purpose is to always have
+ a DictCmdGenerator as its action, and to provide access
+ to the DictCmdGenerator's add_action() method.
+ """
+
+ def __init__(self, builder, cmdgen):
+ if __debug__: logInstanceCreation(self, 'Builder.CompositeBuilder')
+ SCons.Util.Proxy.__init__(self, builder)
+
+ # cmdgen should always be an instance of DictCmdGenerator.
+ self.cmdgen = cmdgen
+ self.builder = builder
+
+ def add_action(self, suffix, action):
+ self.cmdgen.add_action(suffix, action)
+ self.set_src_suffix(self.cmdgen.src_suffixes())
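+
+# Illustrative usage sketch, not part of the original SCons file: Tool
+# modules extend the global static and shared object builders through
+# this proxy, roughly like (the suffix and command variable are
+# examples only):
+#
+#     static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+#     static_obj.add_action('.foo', SCons.Action.Action('$FOOCOM'))
+#     static_obj.add_emitter('.foo', SCons.Defaults.StaticObjectEmitter)
+#
+# add_action() feeds the DictCmdGenerator held in self.cmdgen and then
+# refreshes src_suffix on the wrapped builder.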
diff --git a/tools/scons/scons-local-1.2.0/SCons/CacheDir.py b/tools/scons/scons-local-1.2.0/SCons/CacheDir.py
new file mode 100644
index 000000000..6eb6f173b
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/CacheDir.py
@@ -0,0 +1,217 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/CacheDir.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """
+CacheDir support
+"""
+
+import os.path
+import stat
+import string
+import sys
+
+import SCons.Action
+
+cache_enabled = True
+cache_debug = False
+cache_force = False
+cache_show = False
+
+def CacheRetrieveFunc(target, source, env):
+ t = target[0]
+ fs = t.fs
+ cd = env.get_CacheDir()
+ cachedir, cachefile = cd.cachepath(t)
+ if not fs.exists(cachefile):
+ cd.CacheDebug('CacheRetrieve(%s): %s not in cache\n', t, cachefile)
+ return 1
+ cd.CacheDebug('CacheRetrieve(%s): retrieving from %s\n', t, cachefile)
+ if SCons.Action.execute_actions:
+ if fs.islink(cachefile):
+ fs.symlink(fs.readlink(cachefile), t.path)
+ else:
+ env.copy_from_cache(cachefile, t.path)
+ st = fs.stat(cachefile)
+ fs.chmod(t.path, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+ return 0
+
+def CacheRetrieveString(target, source, env):
+ t = target[0]
+ fs = t.fs
+ cd = env.get_CacheDir()
+ cachedir, cachefile = cd.cachepath(t)
+ if t.fs.exists(cachefile):
+ return "Retrieved `%s' from cache" % t.path
+ return None
+
+CacheRetrieve = SCons.Action.Action(CacheRetrieveFunc, CacheRetrieveString)
+
+CacheRetrieveSilent = SCons.Action.Action(CacheRetrieveFunc, None)
+
+def CachePushFunc(target, source, env):
+ t = target[0]
+ if t.nocache:
+ return
+ fs = t.fs
+ cd = env.get_CacheDir()
+ cachedir, cachefile = cd.cachepath(t)
+ if fs.exists(cachefile):
+ # Don't bother copying it if it's already there. Note that
+ # usually this "shouldn't happen" because if the file already
+ # existed in cache, we'd have retrieved the file from there,
+ # not built it. This can happen, though, in a race, if some
+ # other person running the same build pushes their copy to
+ # the cache after we decide we need to build it but before our
+ # build completes.
+ cd.CacheDebug('CachePush(%s): %s already exists in cache\n', t, cachefile)
+ return
+
+ cd.CacheDebug('CachePush(%s): pushing to %s\n', t, cachefile)
+
+ tempfile = cachefile+'.tmp'+str(os.getpid())
+ errfmt = "Unable to copy %s to cache. Cache file is %s"
+
+ if not fs.isdir(cachedir):
+ try:
+ fs.makedirs(cachedir)
+ except EnvironmentError:
+ # We may have received an exception because another process
+ # has beaten us to creating the directory.
+ if not fs.isdir(cachedir):
+ msg = errfmt % (str(target), cachefile)
+ raise SCons.Errors.EnvironmentError, msg
+
+ try:
+ if fs.islink(t.path):
+ fs.symlink(fs.readlink(t.path), tempfile)
+ else:
+ fs.copy2(t.path, tempfile)
+ fs.rename(tempfile, cachefile)
+ st = fs.stat(t.path)
+ fs.chmod(cachefile, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+ except EnvironmentError:
+ # It's possible someone else tried writing the file at the
+ # same time we did, or else that there was some problem like
+ # the CacheDir being on a separate file system that's full.
+ # In any case, inability to push a file to cache doesn't affect
+ # the correctness of the build, so just print a warning.
+ msg = errfmt % (str(target), cachefile)
+ SCons.Warnings.warn(SCons.Warnings.CacheWriteErrorWarning, msg)
+
+CachePush = SCons.Action.Action(CachePushFunc, None)
+
+class CacheDir:
+
+ def __init__(self, path):
+ try:
+ import hashlib
+ except ImportError:
+ msg = "No hashlib or MD5 module available, CacheDir() not supported"
+ SCons.Warnings.warn(SCons.Warnings.NoMD5ModuleWarning, msg)
+ self.path = None
+ else:
+ self.path = path
+ self.current_cache_debug = None
+ self.debugFP = None
+
+ def CacheDebug(self, fmt, target, cachefile):
+ if cache_debug != self.current_cache_debug:
+ if cache_debug == '-':
+ self.debugFP = sys.stdout
+ elif cache_debug:
+ self.debugFP = open(cache_debug, 'w')
+ else:
+ self.debugFP = None
+ self.current_cache_debug = cache_debug
+ if self.debugFP:
+ self.debugFP.write(fmt % (target, os.path.split(cachefile)[1]))
+
+ def is_enabled(self):
+ return (cache_enabled and not self.path is None)
+
+ def cachepath(self, node):
+ """
+ """
+ if not self.is_enabled():
+ return None, None
+
+ sig = node.get_cachedir_bsig()
+ subdir = string.upper(sig[0])
+ dir = os.path.join(self.path, subdir)
+ return dir, os.path.join(dir, sig)
+
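+ # Illustrative note, not part of the original SCons file: a node
+ # whose cache signature is, say, 'd41d8cd9...' ends up stored as
+ #
+ #     <cache root>/D/d41d8cd9...
+ #
+ # i.e. the cache fans out into at most 16 single-character
+ # subdirectories named after the uppercased first hex digit of the
+ # signature.
+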
+ def retrieve(self, node):
+ """
+ This method is called from multiple threads in a parallel build,
+ so only do thread safe stuff here. Do thread unsafe stuff in
+ built().
+
+ Note that there's a special trick here with the execute flag
+ (one that's not normally done for other actions). Basically
+ if the user requested a no_exec (-n) build, then
+ SCons.Action.execute_actions is set to 0 and when any action
+ is called, it does its showing but then just returns zero
+ instead of actually calling the action execution operation.
+ The problem for caching is that if the file does NOT exist in
+ cache then the CacheRetrieveString won't return anything to
+ show for the task, but the Action.__call__ won't call
+ CacheRetrieveFunc; instead it just returns zero, which makes
+ the code below think that the file *was* successfully
+ retrieved from the cache, therefore it doesn't do any
+ subsequent building. However, the CacheRetrieveString didn't
+ print anything because it didn't actually exist in the cache,
+ and no more build actions will be performed, so the user just
+ sees nothing. The fix is to tell Action.__call__ to always
+ execute the CacheRetrieveFunc and then have the latter
+ explicitly check SCons.Action.execute_actions itself.
+ """
+ if not self.is_enabled():
+ return False
+
+ retrieved = False
+
+ if cache_show:
+ if CacheRetrieveSilent(node, [], node.get_build_env(), execute=1) == 0:
+ node.build(presub=0, execute=0)
+ retrieved = 1
+ else:
+ if CacheRetrieve(node, [], node.get_build_env(), execute=1) == 0:
+ retrieved = 1
+ if retrieved:
+ # Record build signature information, but don't
+ # push it out to cache. (We just got it from there!)
+ node.set_state(SCons.Node.executed)
+ SCons.Node.Node.built(node)
+
+ return retrieved
+
+ def push(self, node):
+ if not self.is_enabled():
+ return
+ return CachePush(node, [], node.get_build_env())
+
+ def push_if_forced(self, node):
+ if cache_force:
+ return self.push(node)
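+
+# Illustrative usage sketch, not part of the original SCons file:
+# derived-file caching is normally switched on from an SConstruct with
+# the public CacheDir() call, which constructs this class under the
+# hood (the path is an example only):
+#
+#     CacheDir('/var/tmp/build-cache')
+#
+# The cache_show, cache_force and cache_debug module flags above are
+# driven by the corresponding scons command-line options
+# (--cache-show, --cache-force, --cache-debug=FILE).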
diff --git a/tools/scons/scons-local-1.2.0/SCons/Conftest.py b/tools/scons/scons-local-1.2.0/SCons/Conftest.py
new file mode 100644
index 000000000..ba7dbf136
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Conftest.py
@@ -0,0 +1,778 @@
+"""SCons.Conftest
+
+Autoconf-like configuration support; low level implementation of tests.
+"""
+
+#
+# Copyright (c) 2003 Stichting NLnet Labs
+# Copyright (c) 2001, 2002, 2003 Steven Knight
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+#
+# The purpose of this module is to define how a check is to be performed.
+# Use one of the Check...() functions below.
+#
+
+#
+# A context class is used that defines functions for carrying out the tests,
+# logging and messages. The following methods and members must be present:
+#
+# context.Display(msg) Function called to print messages that are normally
+# displayed for the user. Newlines are explicitly used.
+# The text should also be written to the logfile!
+#
+# context.Log(msg) Function called to write to a log file.
+#
+# context.BuildProg(text, ext)
+# Function called to build a program, using "ext" for the
+# file extension. Must return an empty string for
+# success, an error message for failure.
+# For reliable test results building should be done just
+# like an actual program would be built, using the same
+# command and arguments (including configure results so
+# far).
+#
+# context.CompileProg(text, ext)
+# Function called to compile a program, using "ext" for
+# the file extension. Must return an empty string for
+# success, an error message for failure.
+# For reliable test results compiling should be done just
+# like an actual source file would be compiled, using the
+# same command and arguments (including configure results
+# so far).
+#
+# context.AppendLIBS(lib_name_list)
+# Append "lib_name_list" to the value of LIBS.
+# "lib_namelist" is a list of strings.
+# Return the value of LIBS before changing it (any type
+# can be used, it is passed to SetLIBS() later.
+#
+# context.SetLIBS(value)
+# Set LIBS to "value". The type of "value" is what
+# AppendLIBS() returned.
+# Return the value of LIBS before changing it (any type
+# can be used, it is passed to SetLIBS() later).
+#
+# context.headerfilename
+# Name of file to append configure results to, usually
+# "confdefs.h".
+# The file must either not exist or be empty when starting.
+# Empty or None to skip this (some tests will not work!).
+#
+# context.config_h (may be missing). If present, must be a string, which
+# will be filled with the contents of a config_h file.
+#
+# context.vardict Dictionary holding variables used for the tests and
+# stores results from the tests, used for the build
+# commands.
+# Normally contains "CC", "LIBS", "CPPFLAGS", etc.
+#
+# context.havedict Dictionary holding results from the tests that are to
+# be used inside a program.
+# Names often start with "HAVE_". These are zero
+# (feature not present) or one (feature present). Other
+# variables may have any value, e.g., "PERLVERSION" can
+# be a number and "SYSTEMNAME" a string.
+#
+
+import re
+import string
+from types import IntType
+
+#
+# PUBLIC VARIABLES
+#
+
+LogInputFiles = 1 # Set this to log the input files in case of a failed test
+LogErrorMessages = 1 # Set this to log Conftest-generated error messages
+
+#
+# PUBLIC FUNCTIONS
+#
+
+# Generic remarks:
+# - When a language is specified which is not supported the test fails. The
+# message is a bit different, because not all the arguments for the normal
+# message are available yet (chicken-egg problem).
+
+
+def CheckBuilder(context, text = None, language = None):
+ """
+ Configure check to see if the compiler works.
+ Note that this uses the current value of compiler and linker flags; make
+ sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
+ "language" should be "C" or "C++" and is used to select the compiler.
+ Default is "C".
+ "text" may be used to specify the code to be build.
+ Returns an empty string for success, an error message for failure.
+ """
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ context.Display("%s\n" % msg)
+ return msg
+
+ if not text:
+ text = """
+int main() {
+ return 0;
+}
+"""
+
+ context.Display("Checking if building a %s file works... " % lang)
+ ret = context.BuildProg(text, suffix)
+ _YesNoResult(context, ret, None, text)
+ return ret
+
+def CheckCC(context):
+ """
+ Configure check for a working C compiler.
+
+ This checks whether the C compiler, as defined in the $CC construction
+ variable, can compile a C source file. It uses the current $CCCOM value
+ too, so that it can test against non-working flags.
+
+ """
+ context.Display("Checking whether the C compiler works")
+ text = """
+int main()
+{
+ return 0;
+}
+"""
+ ret = _check_empty_program(context, 'CC', text, 'C')
+ _YesNoResult(context, ret, None, text)
+ return ret
+
+def CheckSHCC(context):
+ """
+ Configure check for a working shared C compiler.
+
+ This checks whether the C compiler, as defined in the $SHCC construction
+ variable, can compile a C source file. It uses the current $SHCCCOM value
+ too, so that it can test against non-working flags.
+
+ """
+ context.Display("Checking whether the (shared) C compiler works")
+ text = """
+int foo()
+{
+ return 0;
+}
+"""
+ ret = _check_empty_program(context, 'SHCC', text, 'C', use_shared = True)
+ _YesNoResult(context, ret, None, text)
+ return ret
+
+def CheckCXX(context):
+ """
+ Configure check for a working CXX compiler.
+
+ This checks whether the CXX compiler, as defined in the $CXX construction
+ variable, can compile a CXX source file. It uses the current $CXXCOM value
+ too, so that it can test against non-working flags.
+
+ """
+ context.Display("Checking whether the C++ compiler works")
+ text = """
+int main()
+{
+ return 0;
+}
+"""
+ ret = _check_empty_program(context, 'CXX', text, 'C++')
+ _YesNoResult(context, ret, None, text)
+ return ret
+
+def CheckSHCXX(context):
+ """
+ Configure check for a working shared CXX compiler.
+
+ This checks whether the CXX compiler, as defined in the $SHCXX construction
+ variable, can compile a CXX source file. It uses the current $SHCXXCOM value
+ too, so that it can test against non-working flags.
+
+ """
+ context.Display("Checking whether the (shared) C++ compiler works")
+ text = """
+int main()
+{
+ return 0;
+}
+"""
+ ret = _check_empty_program(context, 'SHCXX', text, 'C++', use_shared = True)
+ _YesNoResult(context, ret, None, text)
+ return ret
+
+def _check_empty_program(context, comp, text, language, use_shared = False):
+ """Return 0 on success, 1 otherwise."""
+ if not context.env.has_key(comp) or not context.env[comp]:
+ # The compiler construction variable is not set or empty
+ return 1
+
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ return 1
+
+ if use_shared:
+ return context.CompileSharedObject(text, suffix)
+ else:
+ return context.CompileProg(text, suffix)
+
+
+def CheckFunc(context, function_name, header = None, language = None):
+ """
+ Configure check for a function "function_name".
+ "language" should be "C" or "C++" and is used to select the compiler.
+ Default is "C".
+ Optional "header" can be defined to define a function prototype, include a
+ header file or anything else that comes before main().
+ Sets HAVE_function_name in context.havedict according to the result.
+ Note that this uses the current value of compiler and linker flags; make
+ sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
+ Returns an empty string for success, an error message for failure.
+ """
+
+ # Remarks from autoconf:
+ # - Don't include <ctype.h> because on OSF/1 3.0 it includes <sys/types.h>
+ # which includes <sys/select.h> which contains a prototype for select.
+ # Similarly for bzero.
+ # - assert.h is included to define __stub macros and hopefully few
+ # prototypes, which can conflict with char $1(); below.
+ # - Override any gcc2 internal prototype to avoid an error.
+ # - We use char for the function declaration because int might match the
+ # return type of a gcc2 builtin and then its argument prototype would
+ # still apply.
+ # - The GNU C library defines this for functions which it implements to
+ # always fail with ENOSYS. Some functions are actually named something
+ # starting with __ and the normal name is an alias.
+
+ if context.headerfilename:
+ includetext = '#include "%s"' % context.headerfilename
+ else:
+ includetext = ''
+ if not header:
+ header = """
+#ifdef __cplusplus
+extern "C"
+#endif
+char %s();""" % function_name
+
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ context.Display("Cannot check for %s(): %s\n" % (function_name, msg))
+ return msg
+
+ text = """
+%(include)s
+#include <assert.h>
+%(hdr)s
+
+int main() {
+#if defined (__stub_%(name)s) || defined (__stub___%(name)s)
+ fail fail fail
+#else
+ %(name)s();
+#endif
+
+ return 0;
+}
+""" % { 'name': function_name,
+ 'include': includetext,
+ 'hdr': header }
+
+ context.Display("Checking for %s function %s()... " % (lang, function_name))
+ ret = context.BuildProg(text, suffix)
+ _YesNoResult(context, ret, "HAVE_" + function_name, text,
+ "Define to 1 if the system has the function `%s'." %\
+ function_name)
+ return ret
+
+
+def CheckHeader(context, header_name, header = None, language = None,
+ include_quotes = None):
+ """
+ Configure check for a C or C++ header file "header_name".
+ Optional "header" can be defined to do something before including the
+ header file (unusual, supported for consistency).
+ "language" should be "C" or "C++" and is used to select the compiler.
+ Default is "C".
+ Sets HAVE_header_name in context.havedict according to the result.
+ Note that this uses the current value of compiler and linker flags; make
+ sure $CFLAGS and $CPPFLAGS are set correctly.
+ Returns an empty string for success, an error message for failure.
+ """
+ # Why compile the program instead of just running the preprocessor?
+ # It is possible that the header file exists, but actually using it may
+ # fail (e.g., because it depends on other header files). Thus this test is
+ # more strict. It may require using the "header" argument.
+ #
+ # Use <> by default, because the check is normally used for system header
+ # files. SCons passes '""' to overrule this.
+
+ # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
+ if context.headerfilename:
+ includetext = '#include "%s"\n' % context.headerfilename
+ else:
+ includetext = ''
+ if not header:
+ header = ""
+
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ context.Display("Cannot check for header file %s: %s\n"
+ % (header_name, msg))
+ return msg
+
+ if not include_quotes:
+ include_quotes = "<>"
+
+ text = "%s%s\n#include %s%s%s\n\n" % (includetext, header,
+ include_quotes[0], header_name, include_quotes[1])
+
+ context.Display("Checking for %s header file %s... " % (lang, header_name))
+ ret = context.CompileProg(text, suffix)
+ _YesNoResult(context, ret, "HAVE_" + header_name, text,
+ "Define to 1 if you have the <%s> header file." % header_name)
+ return ret
+
+
+def CheckType(context, type_name, fallback = None,
+ header = None, language = None):
+ """
+ Configure check for a C or C++ type "type_name".
+ Optional "header" can be defined to include a header file.
+ "language" should be "C" or "C++" and is used to select the compiler.
+ Default is "C".
+ Sets HAVE_type_name in context.havedict according to the result.
+ Note that this uses the current value of compiler and linker flags; make
+ sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
+ Returns an empty string for success, an error message for failure.
+ """
+
+ # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
+ if context.headerfilename:
+ includetext = '#include "%s"' % context.headerfilename
+ else:
+ includetext = ''
+ if not header:
+ header = ""
+
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ context.Display("Cannot check for %s type: %s\n" % (type_name, msg))
+ return msg
+
+ # Remarks from autoconf about this test:
+ # - Grepping for the type in include files is not reliable (grep isn't
+ # portable anyway).
+ # - Using "TYPE my_var;" doesn't work for const qualified types in C++.
+ # Adding an initializer is not valid for some C++ classes.
+ # - Using the type as a parameter to a function either fails for K&R C or for
+ # C++.
+ # - Using "TYPE *my_var;" is valid in C for some types that are not
+ # declared (struct something).
+ # - Using "sizeof(TYPE)" is valid when TYPE is actually a variable.
+ # - Using the previous two together works reliably.
+ text = """
+%(include)s
+%(header)s
+
+int main() {
+ if ((%(name)s *) 0)
+ return 0;
+ if (sizeof (%(name)s))
+ return 0;
+}
+""" % { 'include': includetext,
+ 'header': header,
+ 'name': type_name }
+
+ context.Display("Checking for %s type %s... " % (lang, type_name))
+ ret = context.BuildProg(text, suffix)
+ _YesNoResult(context, ret, "HAVE_" + type_name, text,
+ "Define to 1 if the system has the type `%s'." % type_name)
+ if ret and fallback and context.headerfilename:
+ f = open(context.headerfilename, "a")
+ f.write("typedef %s %s;\n" % (fallback, type_name))
+ f.close()
+
+ return ret
+
+def CheckTypeSize(context, type_name, header = None, language = None, expect = None):
+ """This check can be used to get the size of a given type, or to check whether
+ the type is of expected size.
+
+ Arguments:
+ - type_name : str
+ the type to check
+ - header : str
+ optional code (e.g. #include lines) placed before the test code
+ - language : str
+ 'C' or 'C++'
+ - expect : int
+ if given, will test whether the type has the given number of bytes.
+ If not given, will automatically find the size.
+
+ Returns:
+ status : int
+ 0 if the check failed, or the found size of the type if the check succeeded."""
+
+ # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
+ if context.headerfilename:
+ includetext = '#include "%s"' % context.headerfilename
+ else:
+ includetext = ''
+
+ if not header:
+ header = ""
+
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ context.Display("Cannot check for %s type: %s\n" % (type_name, msg))
+ return msg
+
+ src = includetext + header
+ if not expect is None:
+ # Only check if the given size is the right one
+ context.Display('Checking %s is %d bytes... ' % (type_name, expect))
+
+ # Test code taken from autoconf: this is a pretty clever hack to check
+ # that a type is of a given size using only compilation. This speeds
+ # things up quite a bit compared to straightforward code using TryRun.
+ src = src + r"""
+typedef %s scons_check_type;
+
+int main()
+{
+ static int test_array[1 - 2 * !(((long int) (sizeof(scons_check_type))) == %d)];
+ test_array[0] = 0;
+
+ return 0;
+}
+"""
+
+ st = context.CompileProg(src % (type_name, expect), suffix)
+ if not st:
+ context.Display("yes\n")
+ _Have(context, "SIZEOF_%s" % type_name, expect,
+ "The size of `%s', as computed by sizeof." % type_name)
+ return expect
+ else:
+ context.Display("no\n")
+ _LogFailed(context, src, st)
+ return 0
+ else:
+ # No expected size given: determine the size by running a test program
+ context.Message('Checking size of %s ... ' % type_name)
+
+ # We have to be careful with the program we wish to test here since
+ # compilation will be attempted using the current environment's flags.
+ # So make sure that the program will compile without any warning. For
+ # example using: 'int main(int argc, char** argv)' will fail with the
+ # '-Wall -Werror' flags since the variables argc and argv would not be
+ # used in the program...
+ #
+ src = src + """
+#include <stdlib.h>
+#include <stdio.h>
+int main() {
+ printf("%d", (int)sizeof(""" + type_name + """));
+ return 0;
+}
+ """
+ st, out = context.RunProg(src, suffix)
+ try:
+ size = int(out)
+ except ValueError:
+ # If the output of the test program cannot be converted to an
+ # integer (the size), something went wrong, so just fail
+ st = 1
+ size = 0
+
+ if not st:
+ context.Display("yes\n")
+ _Have(context, "SIZEOF_%s" % type_name, size,
+ "The size of `%s', as computed by sizeof." % type_name)
+ return size
+ else:
+ context.Display("no\n")
+ _LogFailed(context, src, st)
+ return 0
+
+ return 0
+
+def CheckDeclaration(context, symbol, includes = None, language = None):
+ """Checks whether symbol is declared.
+
+ Use the same test as autoconf, that is test whether the symbol is defined
+ as a macro or can be used as an r-value.
+
+ Arguments:
+ symbol : str
+ the symbol to check
+ includes : str
+ Optional "header" can be defined to include a header file.
+ language : str
+ only C and C++ supported.
+
+ Returns:
+ status : bool
+ True if the check failed, False if succeeded."""
+
+ # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
+ if context.headerfilename:
+ includetext = '#include "%s"' % context.headerfilename
+ else:
+ includetext = ''
+
+ if not includes:
+ includes = ""
+
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ context.Display("Cannot check for declaration %s: %s\n" % (type_name, msg))
+ return msg
+
+ src = includetext + includes
+ context.Display('Checking whether %s is declared... ' % symbol)
+
+ src = src + r"""
+int main()
+{
+#ifndef %s
+ (void) %s;
+#endif
+ ;
+ return 0;
+}
+""" % (symbol, symbol)
+
+ st = context.CompileProg(src, suffix)
+ _YesNoResult(context, st, "HAVE_DECL_" + symbol, src,
+ "Set to 1 if %s is defined." % symbol)
+ return st
+
+def CheckLib(context, libs, func_name = None, header = None,
+ extra_libs = None, call = None, language = None, autoadd = 1):
+ """
+ Configure check for a C or C++ libraries "libs". Searches through
+ the list of libraries, until one is found where the test succeeds.
+ Tests if "func_name" or "call" exists in the library. Note: if it exists
+ in another library the test succeeds anyway!
+ Optional "header" can be defined to include a header file. If not given a
+ default prototype for "func_name" is added.
+ Optional "extra_libs" is a list of library names to be added after
+ "lib_name" in the build command. To be used for libraries that "lib_name"
+ depends on.
+ Optional "call" replaces the call to "func_name" in the test code. It must
+ consist of complete C statements, including a trailing ";".
+ Both "func_name" and "call" arguments are optional, and in that case, just
+ linking against the libs is tested.
+ "language" should be "C" or "C++" and is used to select the compiler.
+ Default is "C".
+ Note that this uses the current value of compiler and linker flags; make
+ sure $CFLAGS, $CPPFLAGS and $LIBS are set correctly.
+ Returns an empty string for success, an error message for failure.
+ """
+ # Include "confdefs.h" first, so that the header can use HAVE_HEADER_H.
+ if context.headerfilename:
+ includetext = '#include "%s"' % context.headerfilename
+ else:
+ includetext = ''
+ if not header:
+ header = ""
+
+ text = """
+%s
+%s""" % (includetext, header)
+
+ # Add a function declaration if needed.
+ if func_name and func_name != "main":
+ if not header:
+ text = text + """
+#ifdef __cplusplus
+extern "C"
+#endif
+char %s();
+""" % func_name
+
+ # The actual test code.
+ if not call:
+ call = "%s();" % func_name
+
+ # if no function to test, leave main() blank
+ text = text + """
+int
+main() {
+ %s
+return 0;
+}
+""" % (call or "")
+
+ if call:
+ i = string.find(call, "\n")
+ if i > 0:
+ calltext = call[:i] + ".."
+ elif call[-1] == ';':
+ calltext = call[:-1]
+ else:
+ calltext = call
+
+ for lib_name in libs:
+
+ lang, suffix, msg = _lang2suffix(language)
+ if msg:
+ context.Display("Cannot check for library %s: %s\n" % (lib_name, msg))
+ return msg
+
+ # if a function was specified to run in main(), say it
+ if call:
+ context.Display("Checking for %s in %s library %s... "
+ % (calltext, lang, lib_name))
+ # otherwise, just say the name of library and language
+ else:
+ context.Display("Checking for %s library %s... "
+ % (lang, lib_name))
+
+ if lib_name:
+ l = [ lib_name ]
+ if extra_libs:
+ l.extend(extra_libs)
+ oldLIBS = context.AppendLIBS(l)
+ sym = "HAVE_LIB" + lib_name
+ else:
+ oldLIBS = -1
+ sym = None
+
+ ret = context.BuildProg(text, suffix)
+
+ _YesNoResult(context, ret, sym, text,
+ "Define to 1 if you have the `%s' library." % lib_name)
+ if oldLIBS != -1 and (ret or not autoadd):
+ context.SetLIBS(oldLIBS)
+
+ if not ret:
+ return ret
+
+ return ret
+
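+# NOTE (editor's illustrative sketch, not part of the upstream SCons code):
+# CheckLib() above is usually invoked through a Configure context, e.g.:
+#
+#     conf = Configure(env)
+#     if not conf.CheckLib('m', 'cos'):
+#         print 'did not find the math library'
+#     env = conf.Finish()
+#
+# On success (with autoadd left at its default of 1) the library is appended
+# to env['LIBS'] and HAVE_LIBM is recorded via _Have() below.
+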
+#
+# END OF PUBLIC FUNCTIONS
+#
+
+def _YesNoResult(context, ret, key, text, comment = None):
+ """
+ Handle the result of a test with a "yes" or "no" result.
+ "ret" is the return value: empty if OK, error message when not.
+ "key" is the name of the symbol to be defined (HAVE_foo).
+ "text" is the source code of the program used for testing.
+ "comment" is the C comment to add above the line defining the symbol (the
+ comment is automatically put inside a /* */). If None, no comment is added.
+ """
+ if key:
+ _Have(context, key, not ret, comment)
+ if ret:
+ context.Display("no\n")
+ _LogFailed(context, text, ret)
+ else:
+ context.Display("yes\n")
+
+
+def _Have(context, key, have, comment = None):
+ """
+ Store result of a test in context.havedict and context.headerfilename.
+ "key" is a "HAVE_abc" name. It is turned into all CAPITALS and non-
+ alphanumerics are replaced by an underscore.
+ The value of "have" can be:
+ 1 - Feature is defined, add "#define key".
+ 0 - Feature is not defined, add "/* #undef key */".
+ Adding "undef" is what autoconf does. Not useful for the
+ compiler, but it shows that the test was done.
+ number - Feature is defined to this number "#define key have".
+ Doesn't work for 0 or 1, use a string then.
+ string - Feature is defined to this string "#define key have".
+ Give "have" as is should appear in the header file, include quotes
+ when desired and escape special characters!
+ """
+ key_up = string.upper(key)
+ key_up = re.sub('[^A-Z0-9_]', '_', key_up)
+ context.havedict[key_up] = have
+ if have == 1:
+ line = "#define %s 1\n" % key_up
+ elif have == 0:
+ line = "/* #undef %s */\n" % key_up
+ elif type(have) == IntType:
+ line = "#define %s %d\n" % (key_up, have)
+ else:
+ line = "#define %s %s\n" % (key_up, str(have))
+
+ if comment is not None:
+ lines = "\n/* %s */\n" % comment + line
+ else:
+ lines = "\n" + line
+
+ if context.headerfilename:
+ f = open(context.headerfilename, "a")
+ f.write(lines)
+ f.close()
+ elif hasattr(context,'config_h'):
+ context.config_h = context.config_h + lines
+
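+# NOTE (editor's illustrative examples, not part of the upstream SCons code):
+# lines that _Have() above appends to the config header for various values
+# of "have":
+#
+#     _Have(context, 'HAVE_FOO_H', 1)       ->  #define HAVE_FOO_H 1
+#     _Have(context, 'HAVE_FOO_H', 0)       ->  /* #undef HAVE_FOO_H */
+#     _Have(context, 'SIZEOF_INT', 4)       ->  #define SIZEOF_INT 4
+#     _Have(context, 'VERSION', '"1.2"')    ->  #define VERSION "1.2"
+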
+
+def _LogFailed(context, text, msg):
+ """
+ Write to the log about a failed program.
+ Add line numbers, so that error messages can be understood.
+ """
+ if LogInputFiles:
+ context.Log("Failed program was:\n")
+ lines = string.split(text, '\n')
+ if len(lines) and lines[-1] == '':
+ lines = lines[:-1] # remove trailing empty line
+ n = 1
+ for line in lines:
+ context.Log("%d: %s\n" % (n, line))
+ n = n + 1
+ if LogErrorMessages:
+ context.Log("Error message: %s\n" % msg)
+
+
+def _lang2suffix(lang):
+ """
+ Convert a language name to a suffix.
+ When "lang" is empty or None C is assumed.
+ Returns a tuple (lang, suffix, None) when it works.
+ For an unrecognized language returns (None, None, msg).
+ Where:
+ lang = the unified language name
+ suffix = the suffix, including the leading dot
+ msg = an error message
+ """
+ if not lang or lang in ["C", "c"]:
+ return ("C", ".c", None)
+ if lang in ["c++", "C++", "cpp", "CXX", "cxx"]:
+ return ("C++", ".cpp", None)
+
+ return None, None, "Unsupported language: %s" % lang
+
+
+# vim: set sw=4 et sts=4 tw=79 fo+=l:
diff --git a/tools/scons/scons-local-1.2.0/SCons/Debug.py b/tools/scons/scons-local-1.2.0/SCons/Debug.py
new file mode 100644
index 000000000..c6485b6fa
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Debug.py
@@ -0,0 +1,216 @@
+"""SCons.Debug
+
+Code for debugging SCons internal things. Not everything here is
+guaranteed to work all the way back to Python 1.5.2, and shouldn't be
+needed by most users.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Debug.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import string
+import sys
+
+# Recipe 14.10 from the Python Cookbook.
+try:
+ import weakref
+except ImportError:
+ def logInstanceCreation(instance, name=None):
+ pass
+else:
+ def logInstanceCreation(instance, name=None):
+ if name is None:
+ name = instance.__class__.__name__
+ if not tracked_classes.has_key(name):
+ tracked_classes[name] = []
+ tracked_classes[name].append(weakref.ref(instance))
+
+
+
+tracked_classes = {}
+
+def string_to_classes(s):
+ if s == '*':
+ c = tracked_classes.keys()
+ c.sort()
+ return c
+ else:
+ return string.split(s)
+
+def fetchLoggedInstances(classes="*"):
+ classnames = string_to_classes(classes)
+ return map(lambda cn: (cn, len(tracked_classes[cn])), classnames)
+
+def countLoggedInstances(classes, file=sys.stdout):
+ for classname in string_to_classes(classes):
+ file.write("%s: %d\n" % (classname, len(tracked_classes[classname])))
+
+def listLoggedInstances(classes, file=sys.stdout):
+ for classname in string_to_classes(classes):
+ file.write('\n%s:\n' % classname)
+ for ref in tracked_classes[classname]:
+ obj = ref()
+ if obj is not None:
+ file.write(' %s\n' % repr(obj))
+
+def dumpLoggedInstances(classes, file=sys.stdout):
+ for classname in string_to_classes(classes):
+ file.write('\n%s:\n' % classname)
+ for ref in tracked_classes[classname]:
+ obj = ref()
+ if obj is not None:
+ file.write(' %s:\n' % obj)
+ for key, value in obj.__dict__.items():
+ file.write(' %20s : %s\n' % (key, value))
+
+
+
+if sys.platform[:5] == "linux":
+ # Linux doesn't actually support memory usage stats from getrusage().
+ def memory():
+ mstr = open('/proc/self/stat').read()
+ mstr = string.split(mstr)[22]
+ return int(mstr)
+else:
+ try:
+ import resource
+ except ImportError:
+ try:
+ import win32process
+ import win32api
+ except ImportError:
+ def memory():
+ return 0
+ else:
+ def memory():
+ process_handle = win32api.GetCurrentProcess()
+ memory_info = win32process.GetProcessMemoryInfo( process_handle )
+ return memory_info['PeakWorkingSetSize']
+ else:
+ def memory():
+ res = resource.getrusage(resource.RUSAGE_SELF)
+ return res[4]
+
+# returns caller's stack
+def caller_stack(*backlist):
+ import traceback
+ if not backlist:
+ backlist = [0]
+ result = []
+ for back in backlist:
+ tb = traceback.extract_stack(limit=3+back)
+ key = tb[0][:3]
+ result.append('%s:%d(%s)' % func_shorten(key))
+ return result
+
+caller_bases = {}
+caller_dicts = {}
+
+# trace a caller's stack
+def caller_trace(back=0):
+ import traceback
+ tb = traceback.extract_stack(limit=3+back)
+ tb.reverse()
+ callee = tb[1][:3]
+ caller_bases[callee] = caller_bases.get(callee, 0) + 1
+ for caller in tb[2:]:
+ caller = callee + caller[:3]
+ try:
+ entry = caller_dicts[callee]
+ except KeyError:
+ caller_dicts[callee] = entry = {}
+ entry[caller] = entry.get(caller, 0) + 1
+ callee = caller
+
+# print a single caller and its callers, if any
+def _dump_one_caller(key, file, level=0):
+ l = []
+ for c,v in caller_dicts[key].items():
+ l.append((-v,c))
+ l.sort()
+ leader = ' '*level
+ for v,c in l:
+ file.write("%s %6d %s:%d(%s)\n" % ((leader,-v) + func_shorten(c[-3:])))
+ if caller_dicts.has_key(c):
+ _dump_one_caller(c, file, level+1)
+
+# print each call tree
+def dump_caller_counts(file=sys.stdout):
+ keys = caller_bases.keys()
+ keys.sort()
+ for k in keys:
+ file.write("Callers of %s:%d(%s), %d calls:\n"
+ % (func_shorten(k) + (caller_bases[k],)))
+ _dump_one_caller(k, file)
+
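+# NOTE (editor's illustrative sketch, not part of the upstream SCons code):
+# a typical way to use the two helpers above is to call caller_trace() from
+# a function whose callers you want to count, then dump the totals once at
+# the end of the run (the log path below is made up):
+#
+#     def interesting_function():
+#         caller_trace()
+#         ...
+#
+#     dump_caller_counts(open('/tmp/scons-callers.log', 'w'))
+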
+shorten_list = [
+ ( '/scons/SCons/', 1),
+ ( '/src/engine/SCons/', 1),
+ ( '/usr/lib/python', 0),
+]
+
+if os.sep != '/':
+ def platformize(t):
+ return (string.replace(t[0], '/', os.sep), t[1])
+ shorten_list = map(platformize, shorten_list)
+ del platformize
+
+def func_shorten(func_tuple):
+ f = func_tuple[0]
+ for t in shorten_list:
+ i = string.find(f, t[0])
+ if i >= 0:
+ if t[1]:
+ i = i + len(t[0])
+ return (f[i:],)+func_tuple[1:]
+ return func_tuple
+
+
+TraceFP = {}
+if sys.platform == 'win32':
+ TraceDefault = 'con'
+else:
+ TraceDefault = '/dev/tty'
+
+def Trace(msg, file=None, mode='w'):
+ """Write a trace message to a file. Whenever a file is specified,
+ it becomes the default for the next call to Trace()."""
+ global TraceDefault
+ if file is None:
+ file = TraceDefault
+ else:
+ TraceDefault = file
+ try:
+ fp = TraceFP[file]
+ except KeyError:
+ try:
+ fp = TraceFP[file] = open(file, mode)
+ except TypeError:
+ # Assume we were passed an open file pointer.
+ fp = file
+ fp.write(msg)
+ fp.flush()
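+
+# NOTE (editor's illustrative sketch, not part of the upstream SCons code):
+# Trace() is meant for quick printf-style debugging of the build engine.
+# The file named in the first call (a made-up path here) becomes the default
+# destination for subsequent calls:
+#
+#     Trace('entering Taskmaster\n', '/tmp/scons-trace.log')
+#     Trace('still running\n')    # appended to the same file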
diff --git a/tools/scons/scons-local-1.2.0/SCons/Defaults.py b/tools/scons/scons-local-1.2.0/SCons/Defaults.py
new file mode 100644
index 000000000..fc0ab26ba
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Defaults.py
@@ -0,0 +1,463 @@
+"""SCons.Defaults
+
+Builders and other things for the local site. Here's where we'll
+duplicate the functionality of autoconf until we move it into the
+installation procedure or use something like qmconf.
+
+The code that reads the registry to find MSVC components was borrowed
+from distutils.msvccompiler.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Defaults.py 3842 2008/12/20 22:59:52 scons"
+
+
+
+import os
+import os.path
+import shutil
+import stat
+import string
+import time
+import types
+import sys
+
+import SCons.Action
+import SCons.Builder
+import SCons.CacheDir
+import SCons.Environment
+import SCons.PathList
+import SCons.Subst
+import SCons.Tool
+
+# A placeholder for a default Environment (for fetching source files
+# from source code management systems and the like). This must be
+# initialized later, after the top-level directory is set by the calling
+# interface.
+_default_env = None
+
+# Lazily instantiate the default environment so the overhead of creating
+# it doesn't apply when it's not needed.
+def _fetch_DefaultEnvironment(*args, **kw):
+ """
+ Returns the already-created default construction environment.
+ """
+ global _default_env
+ return _default_env
+
+def DefaultEnvironment(*args, **kw):
+ """
+ Initial public entry point for creating the default construction
+ Environment.
+
+ After creating the environment, we overwrite our name
+ (DefaultEnvironment) with the _fetch_DefaultEnvironment() function,
+ which more efficiently returns the initialized default construction
+ environment without checking for its existence.
+
+ (This function still exists with its _default_check because someone
+ else (*cough* Script/__init__.py *cough*) may keep a reference
+ to this function. So we can't use the fully functional idiom of
+ having the name originally be something that *only* creates the
+ construction environment and then overwrites the name.)
+ """
+ global _default_env
+ if not _default_env:
+ import SCons.Util
+ _default_env = apply(SCons.Environment.Environment, args, kw)
+ if SCons.Util.md5:
+ _default_env.Decider('MD5')
+ else:
+ _default_env.Decider('timestamp-match')
+ global DefaultEnvironment
+ DefaultEnvironment = _fetch_DefaultEnvironment
+ _default_env._CacheDir_path = None
+ return _default_env
+
+# Emitters for setting the shared attribute on object files,
+# and an action for checking that all of the source files
+# going into a shared library are, in fact, shared.
+def StaticObjectEmitter(target, source, env):
+ for tgt in target:
+ tgt.attributes.shared = None
+ return (target, source)
+
+def SharedObjectEmitter(target, source, env):
+ for tgt in target:
+ tgt.attributes.shared = 1
+ return (target, source)
+
+def SharedFlagChecker(source, target, env):
+ same = env.subst('$STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME')
+ if same == '0' or same == '' or same == 'False':
+ for src in source:
+ try:
+ shared = src.attributes.shared
+ except AttributeError:
+ shared = None
+ if not shared:
+ raise SCons.Errors.UserError, "Source file: %s is static and is not compatible with shared target: %s" % (src, target[0])
+
+SharedCheck = SCons.Action.Action(SharedFlagChecker, None)
+
+# Some people were using these variable names before we made
+# SourceFileScanner part of the public interface. Don't break their
+# SConscript files until we've given them some fair warning and a
+# transition period.
+CScan = SCons.Tool.CScanner
+DScan = SCons.Tool.DScanner
+LaTeXScan = SCons.Tool.LaTeXScanner
+ObjSourceScan = SCons.Tool.SourceFileScanner
+ProgScan = SCons.Tool.ProgramScanner
+
+# These aren't really tool scanners, so they don't quite belong with
+# the rest of those in Tool/__init__.py, but I'm not sure where else
+# they should go. Leave them here for now.
+import SCons.Scanner.Dir
+DirScanner = SCons.Scanner.Dir.DirScanner()
+DirEntryScanner = SCons.Scanner.Dir.DirEntryScanner()
+
+# Actions for common languages.
+CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR")
+ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR")
+CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR")
+ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR")
+
+ASAction = SCons.Action.Action("$ASCOM", "$ASCOMSTR")
+ASPPAction = SCons.Action.Action("$ASPPCOM", "$ASPPCOMSTR")
+
+LinkAction = SCons.Action.Action("$LINKCOM", "$LINKCOMSTR")
+ShLinkAction = SCons.Action.Action("$SHLINKCOM", "$SHLINKCOMSTR")
+
+LdModuleLinkAction = SCons.Action.Action("$LDMODULECOM", "$LDMODULECOMSTR")
+
+# Common tasks that we allow users to perform in platform-independent
+# ways by creating ActionFactory instances.
+ActionFactory = SCons.Action.ActionFactory
+
+def get_paths_str(dest):
+ # If dest is a list, we need to manually call str() on each element
+ if SCons.Util.is_List(dest):
+ elem_strs = []
+ for element in dest:
+ elem_strs.append('"' + str(element) + '"')
+ return '[' + string.join(elem_strs, ', ') + ']'
+ else:
+ return '"' + str(dest) + '"'
+
+def chmod_func(dest, mode):
+ SCons.Node.FS.invalidate_node_memos(dest)
+ if not SCons.Util.is_List(dest):
+ dest = [dest]
+ for element in dest:
+ os.chmod(str(element), mode)
+
+def chmod_strfunc(dest, mode):
+ return 'Chmod(%s, 0%o)' % (get_paths_str(dest), mode)
+
+Chmod = ActionFactory(chmod_func, chmod_strfunc)
+
+def copy_func(dest, src):
+ SCons.Node.FS.invalidate_node_memos(dest)
+ if SCons.Util.is_List(src) and os.path.isdir(dest):
+ for file in src:
+ shutil.copy2(file, dest)
+ return 0
+ elif os.path.isfile(src):
+ return shutil.copy2(src, dest)
+ else:
+ return shutil.copytree(src, dest, 1)
+
+Copy = ActionFactory(copy_func,
+ lambda dest, src: 'Copy("%s", "%s")' % (dest, src),
+ convert=str)
+
+def delete_func(dest, must_exist=0):
+ SCons.Node.FS.invalidate_node_memos(dest)
+ if not SCons.Util.is_List(dest):
+ dest = [dest]
+ for entry in dest:
+ entry = str(entry)
+ if not must_exist and not os.path.exists(entry):
+ continue
+ if not os.path.exists(entry) or os.path.isfile(entry):
+ os.unlink(entry)
+ continue
+ else:
+ shutil.rmtree(entry, 1)
+ continue
+
+def delete_strfunc(dest, must_exist=0):
+ return 'Delete(%s)' % get_paths_str(dest)
+
+Delete = ActionFactory(delete_func, delete_strfunc)
+
+def mkdir_func(dest):
+ SCons.Node.FS.invalidate_node_memos(dest)
+ if not SCons.Util.is_List(dest):
+ dest = [dest]
+ for entry in dest:
+ os.makedirs(str(entry))
+
+Mkdir = ActionFactory(mkdir_func,
+ lambda dir: 'Mkdir(%s)' % get_paths_str(dir))
+
+def move_func(dest, src):
+ SCons.Node.FS.invalidate_node_memos(dest)
+ SCons.Node.FS.invalidate_node_memos(src)
+ os.rename(src, dest)
+
+Move = ActionFactory(move_func,
+ lambda dest, src: 'Move("%s", "%s")' % (dest, src),
+ convert=str)
+
+def touch_func(dest):
+ SCons.Node.FS.invalidate_node_memos(dest)
+ if not SCons.Util.is_List(dest):
+ dest = [dest]
+ for file in dest:
+ file = str(file)
+ mtime = int(time.time())
+ if os.path.exists(file):
+ atime = os.path.getatime(file)
+ else:
+ open(file, 'w')
+ atime = mtime
+ os.utime(file, (atime, mtime))
+
+Touch = ActionFactory(touch_func,
+ lambda file: 'Touch(%s)' % get_paths_str(file))
+
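+# NOTE (editor's illustrative sketch, not part of the upstream SCons code):
+# the factories above are the Chmod/Copy/Delete/Mkdir/Move/Touch actions
+# available in SConscript files, e.g.:
+#
+#     env.Command('build/readme.txt', 'README',
+#                 [Copy('$TARGET', '$SOURCE'),
+#                  Chmod('$TARGET', 0644)])
+#
+# Each factory pairs the function that does the work with a strfunction
+# that pretty-prints the action for the build output.
+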
+# Internal utility functions
+
+def _concat(prefix, list, suffix, env, f=lambda x: x, target=None, source=None):
+ """
+ Creates a new list from 'list' by first interpolating each element
+ in the list using the 'env' dictionary and then calling f on the
+ list, and finally calling _concat_ixes to concatenate 'prefix' and
+ 'suffix' onto each element of the list.
+ """
+ if not list:
+ return list
+
+ l = f(SCons.PathList.PathList(list).subst_path(env, target, source))
+ if not l is None:
+ list = l
+
+ return _concat_ixes(prefix, list, suffix, env)
+
+def _concat_ixes(prefix, list, suffix, env):
+ """
+ Creates a new list from 'list' by concatenating the 'prefix' and
+ 'suffix' arguments onto each element of the list. A trailing space
+ on 'prefix' or leading space on 'suffix' will cause them to be put
+ into separate list elements rather than being concatenated.
+ """
+
+ result = []
+
+ # ensure that prefix and suffix are strings
+ prefix = str(env.subst(prefix, SCons.Subst.SUBST_RAW))
+ suffix = str(env.subst(suffix, SCons.Subst.SUBST_RAW))
+
+ for x in list:
+ if isinstance(x, SCons.Node.FS.File):
+ result.append(x)
+ continue
+ x = str(x)
+ if x:
+
+ if prefix:
+ if prefix[-1] == ' ':
+ result.append(prefix[:-1])
+ elif x[:len(prefix)] != prefix:
+ x = prefix + x
+
+ result.append(x)
+
+ if suffix:
+ if suffix[0] == ' ':
+ result.append(suffix[1:])
+ elif x[-len(suffix):] != suffix:
+ result[-1] = result[-1]+suffix
+
+ return result
+
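+# NOTE (editor's illustrative examples, not part of the upstream SCons code)
+# for _concat_ixes() above, assuming prefix/suffix substitute to themselves:
+#
+#     _concat_ixes('-I', ['inc', 'gen'], '', env)   ->  ['-Iinc', '-Igen']
+#     _concat_ixes('-I ', ['inc'], '', env)         ->  ['-I', 'inc']
+#
+# i.e. a trailing space on the prefix (or a leading space on the suffix)
+# keeps it as a separate word instead of gluing it onto each element.
+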
+def _stripixes(prefix, list, suffix, stripprefixes, stripsuffixes, env, c=None):
+ """
+ This is a wrapper around _concat()/_concat_ixes() that checks for the
+ existence of prefixes or suffixes on list elements and strips them
+ where it finds them. This is used by tools (like the GNU linker)
+ that need to turn something like 'libfoo.a' into '-lfoo'.
+ """
+
+ if not list:
+ return list
+
+ if not callable(c):
+ env_c = env['_concat']
+ if env_c != _concat and callable(env_c):
+ # There's a custom _concat() method in the construction
+ # environment, and we've allowed people to set that in
+ # the past (see test/custom-concat.py), so preserve the
+ # backwards compatibility.
+ c = env_c
+ else:
+ c = _concat_ixes
+
+ stripprefixes = map(env.subst, SCons.Util.flatten(stripprefixes))
+ stripsuffixes = map(env.subst, SCons.Util.flatten(stripsuffixes))
+
+ stripped = []
+ for l in SCons.PathList.PathList(list).subst_path(env, None, None):
+ if isinstance(l, SCons.Node.FS.File):
+ stripped.append(l)
+ continue
+
+ if not SCons.Util.is_String(l):
+ l = str(l)
+
+ for stripprefix in stripprefixes:
+ lsp = len(stripprefix)
+ if l[:lsp] == stripprefix:
+ l = l[lsp:]
+ # Do not strip more than one prefix
+ break
+
+ for stripsuffix in stripsuffixes:
+ lss = len(stripsuffix)
+ if l[-lss:] == stripsuffix:
+ l = l[:-lss]
+ # Do not strip more than one suffix
+ break
+
+ stripped.append(l)
+
+ return c(prefix, stripped, suffix, env)
+
+def _defines(prefix, defs, suffix, env, c=_concat_ixes):
+ """A wrapper around _concat_ixes that turns a list or string
+ into a list of C preprocessor command-line definitions.
+ """
+ if SCons.Util.is_List(defs):
+ l = []
+ for d in defs:
+ if SCons.Util.is_List(d) or type(d) is types.TupleType:
+ l.append(str(d[0]) + '=' + str(d[1]))
+ else:
+ l.append(str(d))
+ elif SCons.Util.is_Dict(defs):
+ # The items in a dictionary are stored in random order, but
+ # if the order of the command-line options changes from
+ # invocation to invocation, then the signature of the command
+ # line will change and we'll get random unnecessary rebuilds.
+ # Consequently, we have to sort the keys to ensure a
+ # consistent order...
+ l = []
+ keys = defs.keys()
+ keys.sort()
+ for k in keys:
+ v = defs[k]
+ if v is None:
+ l.append(str(k))
+ else:
+ l.append(str(k) + '=' + str(v))
+ else:
+ l = [str(defs)]
+ return c(prefix, env.subst_path(l), suffix, env)
+
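+# NOTE (editor's illustrative examples, not part of the upstream SCons code)
+# for _defines() above, with a '-D' prefix and an empty suffix:
+#
+#     ['FOO', ('BAR', 1)]       ->  ['-DFOO', '-DBAR=1']
+#     {'FOO': None, 'BAR': 1}   ->  ['-DBAR=1', '-DFOO']   (keys are sorted)
+#     'FOO'                     ->  ['-DFOO']
+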
+class NullCmdGenerator:
+ """This is a callable class that can be used in place of other
+ command generators if you don't want them to do anything.
+
+ The __call__ method for this class simply returns the thing
+ you instantiated it with.
+
+ Example usage:
+ env["DO_NOTHING"] = NullCmdGenerator
+ env["LINKCOM"] = "${DO_NOTHING('$LINK $SOURCES $TARGET')}"
+ """
+
+ def __init__(self, cmd):
+ self.cmd = cmd
+
+ def __call__(self, target, source, env, for_signature=None):
+ return self.cmd
+
+class Variable_Method_Caller:
+ """A class for finding a construction variable on the stack and
+ calling one of its methods.
+
+ We use this to support "construction variables" in our string
+ eval()s that actually stand in for methods--specifically, use
+ of "RDirs" in call to _concat that should actually execute the
+ "TARGET.RDirs" method. (We used to support this by creating a little
+ "build dictionary" that mapped RDirs to the method, but this got in
+ the way of Memoizing construction environments, because we had to
+ create new environment objects to hold the variables.)
+ """
+ def __init__(self, variable, method):
+ self.variable = variable
+ self.method = method
+ def __call__(self, *args, **kw):
+ try: 1/0
+ except ZeroDivisionError:
+ # Don't start iterating with the current stack-frame to
+ # prevent creating reference cycles (f_back is safe).
+ frame = sys.exc_info()[2].tb_frame.f_back
+ variable = self.variable
+ while frame:
+ if frame.f_locals.has_key(variable):
+ v = frame.f_locals[variable]
+ if v:
+ method = getattr(v, self.method)
+ return apply(method, args, kw)
+ frame = frame.f_back
+ return None
+
+ConstructionEnvironment = {
+ 'BUILDERS' : {},
+ 'SCANNERS' : [],
+ 'CONFIGUREDIR' : '#/.sconf_temp',
+ 'CONFIGURELOG' : '#/config.log',
+ 'CPPSUFFIXES' : SCons.Tool.CSuffixes,
+ 'DSUFFIXES' : SCons.Tool.DSuffixes,
+ 'ENV' : {},
+ 'IDLSUFFIXES' : SCons.Tool.IDLSuffixes,
+ 'LATEXSUFFIXES' : SCons.Tool.LaTeXSuffixes,
+ '_concat' : _concat,
+ '_defines' : _defines,
+ '_stripixes' : _stripixes,
+ '_LIBFLAGS' : '${_concat(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, __env__)}',
+ '_LIBDIRFLAGS' : '$( ${_concat(LIBDIRPREFIX, LIBPATH, LIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)',
+ '_CPPINCFLAGS' : '$( ${_concat(INCPREFIX, CPPPATH, INCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)',
+ '_CPPDEFFLAGS' : '${_defines(CPPDEFPREFIX, CPPDEFINES, CPPDEFSUFFIX, __env__)}',
+ 'TEMPFILE' : NullCmdGenerator,
+ 'Dir' : Variable_Method_Caller('TARGET', 'Dir'),
+ 'Dirs' : Variable_Method_Caller('TARGET', 'Dirs'),
+ 'File' : Variable_Method_Caller('TARGET', 'File'),
+ 'RDirs' : Variable_Method_Caller('TARGET', 'RDirs'),
+}
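+
+# NOTE (editor's illustrative note, not part of the upstream SCons code):
+# with the defaults above, $_CPPINCFLAGS runs each CPPPATH entry through
+# _concat(), so CPPPATH = ['include', '/usr/local/include'] comes out on the
+# compile line roughly as
+#
+#     -Iinclude -I/usr/local/include
+#
+# and the surrounding $( ... $) markers keep those flags out of the command
+# signature used for rebuild decisions.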
diff --git a/tools/scons/scons-local-1.2.0/SCons/Environment.py b/tools/scons/scons-local-1.2.0/SCons/Environment.py
new file mode 100644
index 000000000..e1a8ec2c6
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Environment.py
@@ -0,0 +1,2300 @@
+"""SCons.Environment
+
+Base class for construction Environments. These are
+the primary objects used to communicate dependency and
+construction information to the build engine.
+
+Keyword arguments supplied when the construction Environment
+is created are construction variables used to initialize the
+Environment.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Environment.py 3842 2008/12/20 22:59:52 scons"
+
+
+import copy
+import os
+import sys
+import re
+import shlex
+import string
+from UserDict import UserDict
+
+import SCons.Action
+import SCons.Builder
+from SCons.Debug import logInstanceCreation
+import SCons.Defaults
+import SCons.Errors
+import SCons.Memoize
+import SCons.Node
+import SCons.Node.Alias
+import SCons.Node.FS
+import SCons.Node.Python
+import SCons.Platform
+import SCons.SConsign
+import SCons.Subst
+import SCons.Tool
+import SCons.Util
+import SCons.Warnings
+
+class _Null:
+ pass
+
+_null = _Null
+
+_warn_copy_deprecated = True
+_warn_source_signatures_deprecated = True
+_warn_target_signatures_deprecated = True
+
+CleanTargets = {}
+CalculatorArgs = {}
+
+semi_deepcopy = SCons.Util.semi_deepcopy
+
+# Pull UserError into the global name space for the benefit of
+# Environment().SourceSignatures(), which has some import statements
+# which seem to mess up its ability to reference SCons directly.
+UserError = SCons.Errors.UserError
+
+def alias_builder(env, target, source):
+ pass
+
+AliasBuilder = SCons.Builder.Builder(action = alias_builder,
+ target_factory = SCons.Node.Alias.default_ans.Alias,
+ source_factory = SCons.Node.FS.Entry,
+ multi = 1,
+ is_explicit = None,
+ name='AliasBuilder')
+
+def apply_tools(env, tools, toolpath):
+ # Store the toolpath in the Environment.
+ if toolpath is not None:
+ env['toolpath'] = toolpath
+
+ if not tools:
+ return
+ # Filter out null tools from the list.
+ for tool in filter(None, tools):
+ if SCons.Util.is_List(tool) or type(tool)==type(()):
+ toolname = tool[0]
+ toolargs = tool[1] # should be a dict of kw args
+ tool = apply(env.Tool, [toolname], toolargs)
+ else:
+ env.Tool(tool)
+
+# These names are (or will be) controlled by SCons; users should never
+# set or override them. This warning can optionally be turned off,
+# but scons will still ignore the illegal variable names even if it's off.
+reserved_construction_var_names = [
+ 'SOURCE',
+ 'SOURCES',
+ 'TARGET',
+ 'TARGETS',
+]
+
+future_reserved_construction_var_names = [
+ 'CHANGED_SOURCES',
+ 'CHANGED_TARGETS',
+ 'UNCHANGED_SOURCES',
+ 'UNCHANGED_TARGETS',
+]
+
+def copy_non_reserved_keywords(dict):
+ result = semi_deepcopy(dict)
+ for k in result.keys():
+ if k in reserved_construction_var_names:
+ msg = "Ignoring attempt to set reserved variable `$%s'"
+ SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % k)
+ del result[k]
+ return result
+
+def _set_reserved(env, key, value):
+ msg = "Ignoring attempt to set reserved variable `$%s'"
+ SCons.Warnings.warn(SCons.Warnings.ReservedVariableWarning, msg % key)
+
+def _set_future_reserved(env, key, value):
+ env._dict[key] = value
+ msg = "`$%s' will be reserved in a future release and setting it will become ignored"
+ SCons.Warnings.warn(SCons.Warnings.FutureReservedVariableWarning, msg % key)
+
+def _set_BUILDERS(env, key, value):
+ try:
+ bd = env._dict[key]
+ for k in bd.keys():
+ del bd[k]
+ except KeyError:
+ bd = BuilderDict({}, env)
+ env._dict[key] = bd
+ bd.update(value)
+
+def _del_SCANNERS(env, key):
+ del env._dict[key]
+ env.scanner_map_delete()
+
+def _set_SCANNERS(env, key, value):
+ env._dict[key] = value
+ env.scanner_map_delete()
+
+def _delete_duplicates(l, keep_last):
+ """Delete duplicates from a sequence, keeping the first or last."""
+ seen={}
+ result=[]
+ if keep_last: # reverse in & out, then keep first
+ l.reverse()
+ for i in l:
+ try:
+ if not seen.has_key(i):
+ result.append(i)
+ seen[i]=1
+ except TypeError:
+ # probably unhashable. Just keep it.
+ result.append(i)
+ if keep_last:
+ result.reverse()
+ return result
+
+
+
+# The following is partly based on code in a comment added by Peter
+# Shannon at the following page (there called the "transplant" class):
+#
+# ASPN : Python Cookbook : Dynamically added methods to a class
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732
+#
+# We had independently been using the idiom as BuilderWrapper, but
+# factoring out the common parts into this base class, and making
+# BuilderWrapper a subclass that overrides __call__() to enforce specific
+# Builder calling conventions, simplified some of our higher-layer code.
+
+class MethodWrapper:
+ """
+ A generic Wrapper class that associates a method (which can
+ actually be any callable) with an object. As part of creating this
+ MethodWrapper object an attribute with the specified (by default,
+ the name of the supplied method) is added to the underlying object.
+ When that new "method" is called, our __call__() method adds the
+ object as the first argument, simulating the Python behavior of
+ supplying "self" on method calls.
+
+ We hang on to the name by which the method was added to the underlying
+ base class so that we can provide a method to "clone" ourselves onto
+ a new underlying object being copied (without which we wouldn't need
+ to save that info).
+ """
+ def __init__(self, object, method, name=None):
+ if name is None:
+ name = method.__name__
+ self.object = object
+ self.method = method
+ self.name = name
+ setattr(self.object, name, self)
+
+ def __call__(self, *args, **kwargs):
+ nargs = (self.object,) + args
+ return apply(self.method, nargs, kwargs)
+
+ def clone(self, new_object):
+ """
+ Returns an object that re-binds the underlying "method" to
+ the specified new object.
+ """
+ return self.__class__(new_object, self.method, self.name)
+
+class BuilderWrapper(MethodWrapper):
+ """
+ A MethodWrapper subclass that associates an environment with
+ a Builder.
+
+ This mainly exists to wrap the __call__() function so that all calls
+ to Builders can have their argument lists massaged in the same way
+ (treat a lone argument as the source, treat two arguments as target
+ then source, make sure both target and source are lists) without
+ having to have cut-and-paste code to do it.
+
+ As a bit of obsessive backwards compatibility, we also intercept
+ attempts to get or set the "env" or "builder" attributes, which were
+ the names we used before we put the common functionality into the
+ MethodWrapper base class. We'll keep this around for a while in case
+ people shipped Tool modules that reached into the wrapper (like the
+ Tool/qt.py module does, or did). There shouldn't be a lot of attribute
+ fetching or setting on these, so a little extra work shouldn't hurt.
+ """
+ def __call__(self, target=None, source=_null, *args, **kw):
+ if source is _null:
+ source = target
+ target = None
+ if not target is None and not SCons.Util.is_List(target):
+ target = [target]
+ if not source is None and not SCons.Util.is_List(source):
+ source = [source]
+ return apply(MethodWrapper.__call__, (self, target, source) + args, kw)
+
+ def __repr__(self):
+ return '<BuilderWrapper %s>' % repr(self.name)
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __getattr__(self, name):
+ if name == 'env':
+ return self.object
+ elif name == 'builder':
+ return self.method
+ else:
+ raise AttributeError, name
+
+ def __setattr__(self, name, value):
+ if name == 'env':
+ self.object = value
+ elif name == 'builder':
+ self.method = value
+ else:
+ self.__dict__[name] = value
+
+ # This allows a Builder to be executed directly
+ # through the Environment to which it's attached.
+ # In practice, we shouldn't need this, because
+ # builders actually get executed through a Node.
+ # But we do have a unit test for this, and can't
+ # yet rule out that it would be useful in the
+ # future, so leave it for now.
+ #def execute(self, **kw):
+ # kw['env'] = self.env
+ # apply(self.builder.execute, (), kw)
+
+class BuilderDict(UserDict):
+ """This is a dictionary-like class used by an Environment to hold
+ the Builders. We need to do this because every time someone changes
+ the Builders in the Environment's BUILDERS dictionary, we must
+ update the Environment's attributes."""
+ def __init__(self, dict, env):
+ # Set self.env before calling the superclass initialization,
+ # because it will end up calling our other methods, which will
+ # need to point the values in this dictionary to self.env.
+ self.env = env
+ UserDict.__init__(self, dict)
+
+ def __semi_deepcopy__(self):
+ return self.__class__(self.data, self.env)
+
+ def __setitem__(self, item, val):
+ try:
+ method = getattr(self.env, item).method
+ except AttributeError:
+ pass
+ else:
+ self.env.RemoveMethod(method)
+ UserDict.__setitem__(self, item, val)
+ BuilderWrapper(self.env, val, item)
+
+ def __delitem__(self, item):
+ UserDict.__delitem__(self, item)
+ delattr(self.env, item)
+
+ def update(self, dict):
+ for i, v in dict.items():
+ self.__setitem__(i, v)
+
+
+
+_is_valid_var = re.compile(r'[_a-zA-Z]\w*$')
+
+def is_valid_construction_var(varstr):
+ """Return if the specified string is a legitimate construction
+ variable.
+ """
+ return _is_valid_var.match(varstr)
+
+
+
+class SubstitutionEnvironment:
+ """Base class for different flavors of construction environments.
+
+ This class contains a minimal set of methods that handle construction
+ variable expansion and conversion of strings to Nodes, which may or
+ may not be actually useful as a stand-alone class. Which methods
+ ended up in this class is pretty arbitrary right now. They're
+ basically the ones which we've empirically determined are common to
+ the different construction environment subclasses, and most of the
+ others that use or touch the underlying dictionary of construction
+ variables.
+
+ Eventually, this class should contain all the methods that we
+ determine are necessary for a "minimal" interface to the build engine.
+ A full "native Python" SCons environment has gotten pretty heavyweight
+ with all of the methods and Tools and construction variables we've
+ jammed in there, so it would be nice to have a lighter weight
+ alternative for interfaces that don't need all of the bells and
+ whistles. (At some point, we'll also probably rename this class
+ "Base," since that more reflects what we want this class to become,
+ but because we've released comments that tell people to subclass
+ Environment.Base to create their own flavors of construction
+ environment, we'll save that for a future refactoring when this
+ class actually becomes useful.)
+ """
+
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ def __init__(self, **kw):
+ """Initialization of an underlying SubstitutionEnvironment class.
+ """
+ if __debug__: logInstanceCreation(self, 'Environment.SubstitutionEnvironment')
+ self.fs = SCons.Node.FS.get_default_fs()
+ self.ans = SCons.Node.Alias.default_ans
+ self.lookup_list = SCons.Node.arg2nodes_lookups
+ self._dict = kw.copy()
+ self._init_special()
+ self.added_methods = []
+ #self._memo = {}
+
+ def _init_special(self):
+ """Initial the dispatch tables for special handling of
+ special construction variables."""
+ self._special_del = {}
+ self._special_del['SCANNERS'] = _del_SCANNERS
+
+ self._special_set = {}
+ for key in reserved_construction_var_names:
+ self._special_set[key] = _set_reserved
+ for key in future_reserved_construction_var_names:
+ self._special_set[key] = _set_future_reserved
+ self._special_set['BUILDERS'] = _set_BUILDERS
+ self._special_set['SCANNERS'] = _set_SCANNERS
+
+ # Freeze the keys of self._special_set in a list for use by
+ # methods that need to check. (Empirically, list scanning has
+ # gotten better than dict.has_key() in Python 2.5.)
+ self._special_set_keys = self._special_set.keys()
+
+ def __cmp__(self, other):
+ return cmp(self._dict, other._dict)
+
+ def __delitem__(self, key):
+ special = self._special_del.get(key)
+ if special:
+ special(self, key)
+ else:
+ del self._dict[key]
+
+ def __getitem__(self, key):
+ return self._dict[key]
+
+ def __setitem__(self, key, value):
+ # This is heavily used. This implementation is the best we have
+ # according to the timings in bench/env.__setitem__.py.
+ #
+ # The "key in self._special_set_keys" test here seems to perform
+ # pretty well for the number of keys we have. A hard-coded
+ # list works a little better in Python 2.5, but that has the
+ # disadvantage of maybe getting out of sync if we ever add more
+ # variable names. Using self._special_set.has_key() works a
+ # little better in Python 2.4, but is worse than this test.
+ # So right now it seems like a good trade-off, but feel free to
+ # revisit this with bench/env.__setitem__.py as needed (and
+ # as newer versions of Python come out).
+ if key in self._special_set_keys:
+ self._special_set[key](self, key, value)
+ else:
+ # If we already have the entry, then it's obviously a valid
+ # key and we don't need to check. If we do check, using a
+ # global, pre-compiled regular expression directly is more
+ # efficient than calling another function or a method.
+ if not self._dict.has_key(key) \
+ and not _is_valid_var.match(key):
+ raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key
+ self._dict[key] = value
+
+ def get(self, key, default=None):
+ "Emulates the get() method of dictionaries."""
+ return self._dict.get(key, default)
+
+ def has_key(self, key):
+ return self._dict.has_key(key)
+
+ def __contains__(self, key):
+ return self._dict.__contains__(key)
+
+ def items(self):
+ return self._dict.items()
+
+ def arg2nodes(self, args, node_factory=_null, lookup_list=_null, **kw):
+ if node_factory is _null:
+ node_factory = self.fs.File
+ if lookup_list is _null:
+ lookup_list = self.lookup_list
+
+ if not args:
+ return []
+
+ args = SCons.Util.flatten(args)
+
+ nodes = []
+ for v in args:
+ if SCons.Util.is_String(v):
+ n = None
+ for l in lookup_list:
+ n = l(v)
+ if not n is None:
+ break
+ if not n is None:
+ if SCons.Util.is_String(n):
+ # n = self.subst(n, raw=1, **kw)
+ kw['raw'] = 1
+ n = apply(self.subst, (n,), kw)
+ if node_factory:
+ n = node_factory(n)
+ if SCons.Util.is_List(n):
+ nodes.extend(n)
+ else:
+ nodes.append(n)
+ elif node_factory:
+ # v = node_factory(self.subst(v, raw=1, **kw))
+ kw['raw'] = 1
+ v = node_factory(apply(self.subst, (v,), kw))
+ if SCons.Util.is_List(v):
+ nodes.extend(v)
+ else:
+ nodes.append(v)
+ else:
+ nodes.append(v)
+
+ return nodes
+
+ def gvars(self):
+ return self._dict
+
+ def lvars(self):
+ return {}
+
+ def subst(self, string, raw=0, target=None, source=None, conv=None):
+ """Recursively interpolates construction variables from the
+ Environment into the specified string, returning the expanded
+ result. Construction variables are specified by a $ prefix
+ in the string and begin with an initial underscore or
+ alphabetic character followed by any number of underscores
+ or alphanumeric characters. The construction variable names
+ may be surrounded by curly braces to separate the name from
+ trailing characters.
+ """
+ gvars = self.gvars()
+ lvars = self.lvars()
+ lvars['__env__'] = self
+ return SCons.Subst.scons_subst(string, self, raw, target, source, gvars, lvars, conv)
+
+ def subst_kw(self, kw, raw=0, target=None, source=None):
+ nkw = {}
+ for k, v in kw.items():
+ k = self.subst(k, raw, target, source)
+ if SCons.Util.is_String(v):
+ v = self.subst(v, raw, target, source)
+ nkw[k] = v
+ return nkw
+
+ def subst_list(self, string, raw=0, target=None, source=None, conv=None):
+ """Calls through to SCons.Subst.scons_subst_list(). See
+ the documentation for that function."""
+ gvars = self.gvars()
+ lvars = self.lvars()
+ lvars['__env__'] = self
+ return SCons.Subst.scons_subst_list(string, self, raw, target, source, gvars, lvars, conv)
+
+ def subst_path(self, path, target=None, source=None):
+ """Substitute a path list, turning EntryProxies into Nodes
+ and leaving Nodes (and other objects) as-is."""
+
+ if not SCons.Util.is_List(path):
+ path = [path]
+
+ def s(obj):
+ """This is the "string conversion" routine that we have our
+ substitutions use to return Nodes, not strings. This relies
+ on the fact that an EntryProxy object has a get() method that
+ returns the underlying Node that it wraps, which is a bit of
+ architectural dependence that we might need to break or modify
+ in the future in response to additional requirements."""
+ try:
+ get = obj.get
+ except AttributeError:
+ obj = SCons.Util.to_String_for_subst(obj)
+ else:
+ obj = get()
+ return obj
+
+ r = []
+ for p in path:
+ if SCons.Util.is_String(p):
+ p = self.subst(p, target=target, source=source, conv=s)
+ if SCons.Util.is_List(p):
+ if len(p) == 1:
+ p = p[0]
+ else:
+ # We have an object plus a string, or multiple
+ # objects that we need to smush together. No choice
+ # but to make them into a string.
+ p = string.join(map(SCons.Util.to_String_for_subst, p), '')
+ else:
+ p = s(p)
+ r.append(p)
+ return r
+
+ subst_target_source = subst
+
+ def backtick(self, command):
+ import subprocess
+ # common arguments
+ kw = { 'stdin' : 'devnull',
+ 'stdout' : subprocess.PIPE,
+ 'stderr' : subprocess.PIPE,
+ 'universal_newlines' : True,
+ }
+ # if the command is a list, assume it's been quoted
+ # otherwise force a shell
+ if not SCons.Util.is_List(command): kw['shell'] = True
+ # run constructed command
+ #TODO(1.5) p = SCons.Action._subproc(self, command, **kw)
+ p = apply(SCons.Action._subproc, (self, command), kw)
+ out,err = p.communicate()
+ status = p.wait()
+ if err:
+ sys.stderr.write(err)
+ if status:
+ raise OSError("'%s' exited %d" % (command, status))
+ return out
+
+ def AddMethod(self, function, name=None):
+ """
+ Adds the specified function as a method of this construction
+ environment with the specified name. If the name is omitted,
+ the default name is the name of the function itself.
+ """
+ method = MethodWrapper(self, function, name)
+ self.added_methods.append(method)
+
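+ # NOTE (editor's illustrative sketch, not part of the upstream SCons code):
+ # a typical use of AddMethod() from an SConscript:
+ #
+ #     def build_test_prog(env, target, source):
+ #         return env.Program(target, source)
+ #     env.AddMethod(build_test_prog, 'TestProgram')
+ #     env.TestProgram('check', ['check.c'])
+ #
+ # The MethodWrapper supplies the environment as the first argument, so the
+ # function behaves like a regular environment method.
+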
+ def RemoveMethod(self, function):
+ """
+ Removes the specified function's MethodWrapper from the
+ added_methods list, so we don't re-bind it when making a clone.
+ """
+ is_not_func = lambda dm, f=function: not dm.method is f
+ self.added_methods = filter(is_not_func, self.added_methods)
+
+ def Override(self, overrides):
+ """
+ Produce a modified environment whose variables are overridden by
+ the overrides dictionary. "overrides" is a dictionary that
+ will override the variables of this environment.
+
+ This function is much more efficient than Clone() or creating
+ a new Environment because it doesn't copy the construction
+ environment dictionary, it just wraps the underlying construction
+ environment, and doesn't even create a wrapper object if there
+ are no overrides.
+ """
+ if not overrides: return self
+ o = copy_non_reserved_keywords(overrides)
+ if not o: return self
+ overrides = {}
+ merges = None
+ for key, value in o.items():
+ if key == 'parse_flags':
+ merges = value
+ else:
+ overrides[key] = SCons.Subst.scons_subst_once(value, self, key)
+ env = OverrideEnvironment(self, overrides)
+ if merges: env.MergeFlags(merges)
+ return env
+
+ def ParseFlags(self, *flags):
+ """
+ Parse the set of flags and return a dict with the flags placed
+ in the appropriate entry. The flags are treated as a typical
+ set of command-line flags for a GNU-like toolchain and used to
+ populate the entries in the dict immediately below. If one of
+ the flag strings begins with a bang (exclamation mark), it is
+ assumed to be a command and the rest of the string is executed;
+ the result of that evaluation is then added to the dict.
+ """
+ dict = {
+ 'ASFLAGS' : SCons.Util.CLVar(''),
+ 'CFLAGS' : SCons.Util.CLVar(''),
+ 'CCFLAGS' : SCons.Util.CLVar(''),
+ 'CPPDEFINES' : [],
+ 'CPPFLAGS' : SCons.Util.CLVar(''),
+ 'CPPPATH' : [],
+ 'FRAMEWORKPATH' : SCons.Util.CLVar(''),
+ 'FRAMEWORKS' : SCons.Util.CLVar(''),
+ 'LIBPATH' : [],
+ 'LIBS' : [],
+ 'LINKFLAGS' : SCons.Util.CLVar(''),
+ 'RPATH' : [],
+ }
+
+ # The use of the "me" parameter to provide our own name for
+ # recursion is an egregious hack to support Python 2.1 and before.
+ def do_parse(arg, me, self = self, dict = dict):
+ # if arg is a sequence, recurse with each element
+ if not arg:
+ return
+
+ if not SCons.Util.is_String(arg):
+ for t in arg: me(t, me)
+ return
+
+ # if arg is a command, execute it
+ if arg[0] == '!':
+ arg = self.backtick(arg[1:])
+
+ # utility function to deal with -D option
+ def append_define(name, dict = dict):
+ t = string.split(name, '=')
+ if len(t) == 1:
+ dict['CPPDEFINES'].append(name)
+ else:
+ dict['CPPDEFINES'].append([t[0], string.join(t[1:], '=')])
+
+ # Loop through the flags and add them to the appropriate option.
+ # This tries to strike a balance between checking for all possible
+ # flags and keeping the logic to a finite size, so it doesn't
+ # check for some that don't occur often. In particular, if the
+ # flag is not known to occur in a config script and there's a way
+ # of passing the flag to the right place (by wrapping it in a -W
+ # flag, for example) we don't check for it. Note that most
+ # preprocessor options are not handled, since unhandled options
+ # are placed in CCFLAGS, so unless the preprocessor is invoked
+ # separately, these flags will still get to the preprocessor.
+ # Other options not currently handled:
+ # -iquote dir (preprocessor search path)
+ # -u symbol (linker undefined symbol)
+ # -s (linker strip files)
+ # -static* (linker static binding)
+ # -shared* (linker dynamic binding)
+ # -symbolic (linker global binding)
+ # -R dir (deprecated linker rpath)
+ # IBM compilers may also accept -qframeworkdir=foo
+
+ params = shlex.split(arg)
+ append_next_arg_to = None # for multi-word args
+ for arg in params:
+ if append_next_arg_to:
+ if append_next_arg_to == 'CPPDEFINES':
+ append_define(arg)
+ elif append_next_arg_to == '-include':
+ t = ('-include', self.fs.File(arg))
+ dict['CCFLAGS'].append(t)
+ elif append_next_arg_to == '-isysroot':
+ t = ('-isysroot', arg)
+ dict['CCFLAGS'].append(t)
+ dict['LINKFLAGS'].append(t)
+ elif append_next_arg_to == '-arch':
+ t = ('-arch', arg)
+ dict['CCFLAGS'].append(t)
+ dict['LINKFLAGS'].append(t)
+ else:
+ dict[append_next_arg_to].append(arg)
+ append_next_arg_to = None
+ elif not arg[0] in ['-', '+']:
+ dict['LIBS'].append(self.fs.File(arg))
+ elif arg[:2] == '-L':
+ if arg[2:]:
+ dict['LIBPATH'].append(arg[2:])
+ else:
+ append_next_arg_to = 'LIBPATH'
+ elif arg[:2] == '-l':
+ if arg[2:]:
+ dict['LIBS'].append(arg[2:])
+ else:
+ append_next_arg_to = 'LIBS'
+ elif arg[:2] == '-I':
+ if arg[2:]:
+ dict['CPPPATH'].append(arg[2:])
+ else:
+ append_next_arg_to = 'CPPPATH'
+ elif arg[:4] == '-Wa,':
+ dict['ASFLAGS'].append(arg[4:])
+ dict['CCFLAGS'].append(arg)
+ elif arg[:4] == '-Wl,':
+ if arg[:11] == '-Wl,-rpath=':
+ dict['RPATH'].append(arg[11:])
+ elif arg[:7] == '-Wl,-R,':
+ dict['RPATH'].append(arg[7:])
+ elif arg[:6] == '-Wl,-R':
+ dict['RPATH'].append(arg[6:])
+ else:
+ dict['LINKFLAGS'].append(arg)
+ elif arg[:4] == '-Wp,':
+ dict['CPPFLAGS'].append(arg)
+ elif arg[:2] == '-D':
+ if arg[2:]:
+ append_define(arg[2:])
+ else:
+ append_next_arg_to = 'CPPDEFINES'
+ elif arg == '-framework':
+ append_next_arg_to = 'FRAMEWORKS'
+ elif arg[:14] == '-frameworkdir=':
+ dict['FRAMEWORKPATH'].append(arg[14:])
+ elif arg[:2] == '-F':
+ if arg[2:]:
+ dict['FRAMEWORKPATH'].append(arg[2:])
+ else:
+ append_next_arg_to = 'FRAMEWORKPATH'
+ elif arg == '-mno-cygwin':
+ dict['CCFLAGS'].append(arg)
+ dict['LINKFLAGS'].append(arg)
+ elif arg == '-mwindows':
+ dict['LINKFLAGS'].append(arg)
+ elif arg == '-pthread':
+ dict['CCFLAGS'].append(arg)
+ dict['LINKFLAGS'].append(arg)
+ elif arg[:5] == '-std=':
+ dict['CFLAGS'].append(arg) # C only
+ elif arg[0] == '+':
+ dict['CCFLAGS'].append(arg)
+ dict['LINKFLAGS'].append(arg)
+ elif arg in ['-include', '-isysroot', '-arch']:
+ append_next_arg_to = arg
+ else:
+ dict['CCFLAGS'].append(arg)
+
+ for arg in flags:
+ do_parse(arg, do_parse)
+ return dict
+
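+ # NOTE (editor's illustrative sketch, not part of the upstream SCons code)
+ # of what ParseFlags() above returns:
+ #
+ #     d = env.ParseFlags('-I/opt/foo/include -DFOO=1 -L/opt/foo/lib -lfoo')
+ #     # d['CPPPATH']    == ['/opt/foo/include']
+ #     # d['CPPDEFINES'] == [['FOO', '1']]
+ #     # d['LIBPATH']    == ['/opt/foo/lib']
+ #     # d['LIBS']       == ['foo']
+ #
+ # A leading '!' runs a command and parses its output instead, e.g.
+ # env.ParseFlags('!pkg-config --cflags --libs libpng').
+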
+ def MergeFlags(self, args, unique=1, dict=None):
+ """
+ Merge the dict in args into the construction variables of this
+ env, or the passed-in dict. If args is not a dict, it is
+ converted into a dict using ParseFlags. If unique is not set,
+ the flags are appended rather than merged.
+ """
+
+ if dict is None:
+ dict = self
+ if not SCons.Util.is_Dict(args):
+ args = self.ParseFlags(args)
+ if not unique:
+ apply(self.Append, (), args)
+ return self
+ for key, value in args.items():
+ if not value:
+ continue
+ try:
+ orig = self[key]
+ except KeyError:
+ orig = value
+ else:
+ if not orig:
+ orig = value
+ elif value:
+ # Add orig and value. The logic here was lifted from
+ # part of env.Append() (see there for a lot of comments
+ # about the order in which things are tried) and is
+ # used mainly to handle coercion of strings to CLVar to
+ # "do the right thing" given (e.g.) an original CCFLAGS
+ # string variable like '-pipe -Wall'.
+ try:
+ orig = orig + value
+ except (KeyError, TypeError):
+ try:
+ add_to_orig = orig.append
+ except AttributeError:
+ value.insert(0, orig)
+ orig = value
+ else:
+ add_to_orig(value)
+ t = []
+ if key[-4:] == 'PATH':
+ ### keep left-most occurrence
+ for v in orig:
+ if v not in t:
+ t.append(v)
+ else:
+ ### keep right-most occurrence
+ orig.reverse()
+ for v in orig:
+ if v not in t:
+ t.insert(0, v)
+ self[key] = t
+ return self
+
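+ # NOTE (editor's illustrative sketch, not part of the upstream SCons code):
+ # MergeFlags() above is commonly handed a raw flag string, which it first
+ # runs through ParseFlags() and then merges uniquely, e.g.:
+ #
+ #     env.MergeFlags('-O2 -I/opt/foo/include -lfoo')
+ #
+ # afterwards '-O2' is in env['CCFLAGS'], '/opt/foo/include' in
+ # env['CPPPATH'] and 'foo' in env['LIBS'].
+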
+# def MergeShellPaths(self, args, prepend=1):
+# """
+# Merge the dict in args into the shell environment in env['ENV'].
+# Shell path elements are appended or prepended according to prepend.
+
+# Uses Pre/AppendENVPath, so it always appends or prepends uniquely.
+
+# Example: env.MergeShellPaths({'LIBPATH': '/usr/local/lib'})
+# prepends /usr/local/lib to env['ENV']['LIBPATH'].
+# """
+
+# for pathname, pathval in args.items():
+# if not pathval:
+# continue
+# if prepend:
+# apply(self.PrependENVPath, (pathname, pathval))
+# else:
+# apply(self.AppendENVPath, (pathname, pathval))
+
+
+# Used by the FindSourceFiles() method, below.
+# Stuck here for support of pre-2.2 Python versions.
+def build_source(ss, result):
+ for s in ss:
+ if isinstance(s, SCons.Node.FS.Dir):
+ build_source(s.all_children(), result)
+ elif s.has_builder():
+ build_source(s.sources, result)
+ elif isinstance(s.disambiguate(), SCons.Node.FS.File):
+ result.append(s)
+
+def default_decide_source(dependency, target, prev_ni):
+ f = SCons.Defaults.DefaultEnvironment().decide_source
+ return f(dependency, target, prev_ni)
+
+def default_decide_target(dependency, target, prev_ni):
+ f = SCons.Defaults.DefaultEnvironment().decide_target
+ return f(dependency, target, prev_ni)
+
+def default_copy_from_cache(src, dst):
+ f = SCons.Defaults.DefaultEnvironment().copy_from_cache
+ return f(src, dst)
+
+class Base(SubstitutionEnvironment):
+ """Base class for "real" construction Environments. These are the
+ primary objects used to communicate dependency and construction
+ information to the build engine.
+
+ Keyword arguments supplied when the construction Environment
+ is created are construction variables used to initialize the
+ Environment.
+ """
+
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ memoizer_counters = []
+
+ #######################################################################
+ # This is THE class for interacting with the SCons build engine,
+ # and it contains a lot of stuff, so we're going to try to keep this
+ # a little organized by grouping the methods.
+ #######################################################################
+
+ #######################################################################
+ # Methods that make an Environment act like a dictionary. These have
+ # the expected standard names for Python mapping objects. Note that
+ # we don't actually make an Environment a subclass of UserDict for
+ # performance reasons. Note also that we only supply methods for
+ # dictionary functionality that we actually need and use.
+ #######################################################################
+
+ def __init__(self,
+ platform=None,
+ tools=None,
+ toolpath=None,
+ variables=None,
+ parse_flags = None,
+ **kw):
+ """
+ Initialization of a basic SCons construction environment,
+ including setting up special construction variables like BUILDER,
+ PLATFORM, etc., and searching for and applying available Tools.
+
+ Note that we do *not* call the underlying base class
+ (SubstitutionEnvironment) initialization, because we need to
+ initialize things in a very specific order that doesn't work
+ with the much simpler base class initialization.
+ """
+ if __debug__: logInstanceCreation(self, 'Environment.Base')
+ self._memo = {}
+ self.fs = SCons.Node.FS.get_default_fs()
+ self.ans = SCons.Node.Alias.default_ans
+ self.lookup_list = SCons.Node.arg2nodes_lookups
+ self._dict = semi_deepcopy(SCons.Defaults.ConstructionEnvironment)
+ self._init_special()
+ self.added_methods = []
+
+ # We don't use AddMethod, or define these as methods in this
+ # class, because we *don't* want these functions to be bound
+ # methods. They need to operate independently so that the
+ # settings will work properly regardless of whether a given
+ # target ends up being built with a Base environment or an
+ # OverrideEnvironment or what have you.
+ self.decide_target = default_decide_target
+ self.decide_source = default_decide_source
+
+ self.copy_from_cache = default_copy_from_cache
+
+ self._dict['BUILDERS'] = BuilderDict(self._dict['BUILDERS'], self)
+
+ if platform is None:
+ platform = self._dict.get('PLATFORM', None)
+ if platform is None:
+ platform = SCons.Platform.Platform()
+ if SCons.Util.is_String(platform):
+ platform = SCons.Platform.Platform(platform)
+ self._dict['PLATFORM'] = str(platform)
+ platform(self)
+
+ # Apply the passed-in and customizable variables to the
+ # environment before calling the tools, because they may use
+ # some of them during initialization.
+ if kw.has_key('options'):
+ # Backwards compatibility: they may still be using the
+ # old "options" keyword.
+ variables = kw['options']
+ del kw['options']
+ apply(self.Replace, (), kw)
+ keys = kw.keys()
+ if variables:
+ keys = keys + variables.keys()
+ variables.Update(self)
+
+ save = {}
+ for k in keys:
+ try:
+ save[k] = self._dict[k]
+ except KeyError:
+ # No value may have been set if they tried to pass in a
+ # reserved variable name like TARGETS.
+ pass
+
+ SCons.Tool.Initializers(self)
+
+ if tools is None:
+ tools = self._dict.get('TOOLS', None)
+ if tools is None:
+ tools = ['default']
+ apply_tools(self, tools, toolpath)
+
+ # Now restore the passed-in and customized variables
+ # to the environment, since the values the user set explicitly
+ # should override any values set by the tools.
+ for key, val in save.items():
+ self._dict[key] = val
+
+ # Finally, apply any flags to be merged in
+ if parse_flags: self.MergeFlags(parse_flags)
+
+ #######################################################################
+ # Utility methods that are primarily for internal use by SCons.
+ # These begin with lower-case letters.
+ #######################################################################
+
+ def get_builder(self, name):
+ """Fetch the builder with the specified name from the environment.
+ """
+ try:
+ return self._dict['BUILDERS'][name]
+ except KeyError:
+ return None
+
+ def get_CacheDir(self):
+ try:
+ path = self._CacheDir_path
+ except AttributeError:
+ path = SCons.Defaults.DefaultEnvironment()._CacheDir_path
+ try:
+ if path == self._last_CacheDir_path:
+ return self._last_CacheDir
+ except AttributeError:
+ pass
+ cd = SCons.CacheDir.CacheDir(path)
+ self._last_CacheDir_path = path
+ self._last_CacheDir = cd
+ return cd
+
+ def get_factory(self, factory, default='File'):
+ """Return a factory function for creating Nodes for this
+ construction environment.
+ """
+ name = default
+ try:
+ is_node = issubclass(factory, SCons.Node.Node)
+ except TypeError:
+ # The specified factory isn't a Node itself--it's
+ # most likely None, or possibly a callable.
+ pass
+ else:
+ if is_node:
+ # The specified factory is a Node (sub)class. Try to
+ # return the FS method that corresponds to the Node's
+ # name--that is, we return self.fs.Dir if they want a Dir,
+ # self.fs.File for a File, etc.
+ try: name = factory.__name__
+ except AttributeError: pass
+ else: factory = None
+ if not factory:
+ # They passed us None, or we picked up a name from a specified
+ # class, so return the FS method. (Note that we *don't*
+ # use our own self.{Dir,File} methods because that would
+ # cause env.subst() to be called twice on the file name,
+ # interfering with files that have $$ in them.)
+ factory = getattr(self.fs, name)
+ return factory
+
+ memoizer_counters.append(SCons.Memoize.CountValue('_gsm'))
+
+ def _gsm(self):
+ try:
+ return self._memo['_gsm']
+ except KeyError:
+ pass
+
+ result = {}
+
+ try:
+ scanners = self._dict['SCANNERS']
+ except KeyError:
+ pass
+ else:
+ # Reverse the scanner list so that, if multiple scanners
+ # claim they can scan the same suffix, earlier scanners
+ # in the list will overwrite later scanners, so that
+ # the result looks like a "first match" to the user.
+ if not SCons.Util.is_List(scanners):
+ scanners = [scanners]
+ else:
+ scanners = scanners[:] # copy so reverse() doesn't mod original
+ scanners.reverse()
+ for scanner in scanners:
+ for k in scanner.get_skeys(self):
+ result[k] = scanner
+
+ self._memo['_gsm'] = result
+
+ return result
+
+ def get_scanner(self, skey):
+ """Find the appropriate scanner given a key (usually a file suffix).
+ """
+ return self._gsm().get(skey)
+
+ def scanner_map_delete(self, kw=None):
+ """Delete the cached scanner map (if we need to).
+ """
+ try:
+ del self._memo['_gsm']
+ except KeyError:
+ pass
+
+ def _update(self, dict):
+ """Update an environment's values directly, bypassing the normal
+ checks that occur when users try to set items.
+ """
+ self._dict.update(dict)
+
+ def get_src_sig_type(self):
+ try:
+ return self.src_sig_type
+ except AttributeError:
+ t = SCons.Defaults.DefaultEnvironment().src_sig_type
+ self.src_sig_type = t
+ return t
+
+ def get_tgt_sig_type(self):
+ try:
+ return self.tgt_sig_type
+ except AttributeError:
+ t = SCons.Defaults.DefaultEnvironment().tgt_sig_type
+ self.tgt_sig_type = t
+ return t
+
+ #######################################################################
+ # Public methods for manipulating an Environment. These begin with
+ # upper-case letters. The essential characteristic of methods in
+ # this section is that they do *not* have corresponding same-named
+ # global functions. For example, a stand-alone Append() function
+ # makes no sense, because Append() is all about appending values to
+ # an Environment's construction variables.
+ #######################################################################
+
+ def Append(self, **kw):
+ """Append values to existing construction variables
+ in an Environment.
+ """
+ kw = copy_non_reserved_keywords(kw)
+ for key, val in kw.items():
+ # It would be easier on the eyes to write this using
+ # "continue" statements whenever we finish processing an item,
+ # but Python 1.5.2 apparently doesn't let you use "continue"
+ # within try:-except: blocks, so we have to nest our code.
+ try:
+ orig = self._dict[key]
+ except KeyError:
+ # No existing variable in the environment, so just set
+ # it to the new value.
+ self._dict[key] = val
+ else:
+ try:
+ # Check if the original looks like a dictionary.
+ # If it is, we can't just try adding the value because
+ # dictionaries don't have __add__() methods, and
+ # things like UserList will incorrectly coerce the
+ # original dict to a list (which we don't want).
+ update_dict = orig.update
+ except AttributeError:
+ try:
+ # Most straightforward: just try to add them
+ # together. This will work in most cases, when the
+ # original and new values are of compatible types.
+ self._dict[key] = orig + val
+ except (KeyError, TypeError):
+ try:
+ # Check if the original is a list.
+ add_to_orig = orig.append
+ except AttributeError:
+ # The original isn't a list, but the new
+ # value is (by process of elimination),
+ # so insert the original in the new value
+ # (if there's one to insert) and replace
+ # the variable with it.
+ if orig:
+ val.insert(0, orig)
+ self._dict[key] = val
+ else:
+ # The original is a list, so append the new
+ # value to it (if there's a value to append).
+ if val:
+ add_to_orig(val)
+ else:
+ # The original looks like a dictionary, so update it
+ # based on what we think the value looks like.
+ if SCons.Util.is_List(val):
+ for v in val:
+ orig[v] = None
+ else:
+ try:
+ update_dict(val)
+ except (AttributeError, TypeError, ValueError):
+ if SCons.Util.is_Dict(val):
+ for k, v in val.items():
+ orig[k] = v
+ else:
+ orig[val] = None
+ self.scanner_map_delete(kw)
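+
+ # Example (illustrative sketch; not part of the upstream SCons source).
+ # Append() merges by type: compatible values are added together, a list
+ # absorbs a scalar, and a dictionary target is updated key by key:
+ #
+ #     env = Environment(CCFLAGS=['-O2'])
+ #     env.Append(CCFLAGS=['-g'])        # CCFLAGS becomes ['-O2', '-g']
+ #     env.Append(CPPPATH='include')     # created as-is if not already set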
+
+ def AppendENVPath(self, name, newpath, envname = 'ENV',
+ sep = os.pathsep, delete_existing=1):
+ """Append path elements to the path 'name' in the 'ENV'
+ dictionary for this environment. Will only add any particular
+ path once, and will normpath and normcase all paths to help
+ assure this. This can also handle the case where the env
+ variable is a list instead of a string.
+
+ If delete_existing is 0, a newpath which is already in the path
+ will not be moved to the end (it will be left where it is).
+ """
+
+ orig = ''
+ if self._dict.has_key(envname) and self._dict[envname].has_key(name):
+ orig = self._dict[envname][name]
+
+ nv = SCons.Util.AppendPath(orig, newpath, sep, delete_existing)
+
+ if not self._dict.has_key(envname):
+ self._dict[envname] = {}
+
+ self._dict[envname][name] = nv
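+
+ # Example (illustrative sketch; not part of the upstream SCons source;
+ # the directories are hypothetical). AppendENVPath() edits the execution
+ # environment used to run external commands, not a construction variable:
+ #
+ #     env.AppendENVPath('PATH', '/opt/cross/bin')
+ #     env.AppendENVPath('LD_LIBRARY_PATH', '/opt/cross/lib', sep=':')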
+
+ def AppendUnique(self, delete_existing=0, **kw):
+ """Append values to existing construction variables
+ in an Environment, if they're not already there.
+ If delete_existing is 1, removes existing values first, so
+ values move to end.
+ """
+ kw = copy_non_reserved_keywords(kw)
+ for key, val in kw.items():
+ if SCons.Util.is_List(val):
+ val = _delete_duplicates(val, delete_existing)
+ if not self._dict.has_key(key) or self._dict[key] in ('', None):
+ self._dict[key] = val
+ elif SCons.Util.is_Dict(self._dict[key]) and \
+ SCons.Util.is_Dict(val):
+ self._dict[key].update(val)
+ elif SCons.Util.is_List(val):
+ dk = self._dict[key]
+ if not SCons.Util.is_List(dk):
+ dk = [dk]
+ if delete_existing:
+ dk = filter(lambda x, val=val: x not in val, dk)
+ else:
+ val = filter(lambda x, dk=dk: x not in dk, val)
+ self._dict[key] = dk + val
+ else:
+ dk = self._dict[key]
+ if SCons.Util.is_List(dk):
+ # By elimination, val is not a list. Since dk is a
+ # list, wrap val in a list first.
+ if delete_existing:
+ dk = filter(lambda x, val=val: x not in val, dk)
+ self._dict[key] = dk + [val]
+ else:
+ if not val in dk:
+ self._dict[key] = dk + [val]
+ else:
+ if delete_existing:
+ dk = filter(lambda x, val=val: x not in val, dk)
+ self._dict[key] = dk + val
+ self.scanner_map_delete(kw)
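+
+ # Example (illustrative sketch; not part of the upstream SCons source).
+ # Values already present are skipped; with delete_existing=1 a duplicate
+ # is removed first, so the appended value moves to the end:
+ #
+ #     env.AppendUnique(CCFLAGS=['-g', '-O2'])
+ #     env.AppendUnique(CPPPATH=['include'], delete_existing=1)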
+
+ def Clone(self, tools=[], toolpath=None, parse_flags = None, **kw):
+ """Return a copy of a construction Environment. The
+ copy is like a Python "deep copy"--that is, independent
+ copies are made recursively of each object--except that
+ a reference is copied when an object is not deep-copyable
+ (like a function). There are no references to any mutable
+ objects in the original Environment.
+ """
+ clone = copy.copy(self)
+ clone._dict = semi_deepcopy(self._dict)
+
+ try:
+ cbd = clone._dict['BUILDERS']
+ except KeyError:
+ pass
+ else:
+ clone._dict['BUILDERS'] = BuilderDict(cbd, clone)
+
+ # Check the methods added via AddMethod() and re-bind them to
+ # the cloned environment. Only do this if the attribute hasn't
+ # been overwritten by the user explicitly and still points to
+ # the added method.
+ clone.added_methods = []
+ for mw in self.added_methods:
+ if mw == getattr(self, mw.name):
+ clone.added_methods.append(mw.clone(clone))
+
+ clone._memo = {}
+
+ # Apply passed-in variables before the tools
+ # so the tools can use the new variables
+ kw = copy_non_reserved_keywords(kw)
+ new = {}
+ for key, value in kw.items():
+ new[key] = SCons.Subst.scons_subst_once(value, self, key)
+ apply(clone.Replace, (), new)
+
+ apply_tools(clone, tools, toolpath)
+
+ # apply them again in case the tools overwrote them
+ apply(clone.Replace, (), new)
+
+ # Finally, apply any flags to be merged in
+ if parse_flags: clone.MergeFlags(parse_flags)
+
+ if __debug__: logInstanceCreation(self, 'Environment.EnvironmentClone')
+ return clone
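+
+ # Example (illustrative sketch; not part of the upstream SCons source).
+ # A clone is independent of the original, so per-variant settings do not
+ # leak back into the parent environment:
+ #
+ #     debug = env.Clone(CCFLAGS=['-g', '-O0'])
+ #     debug.Append(CPPDEFINES=['DEBUG'])    # env itself is unaffected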
+
+ def Copy(self, *args, **kw):
+ global _warn_copy_deprecated
+ if _warn_copy_deprecated:
+ msg = "The env.Copy() method is deprecated; use the env.Clone() method instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedCopyWarning, msg)
+ _warn_copy_deprecated = False
+ return apply(self.Clone, args, kw)
+
+ def _changed_build(self, dependency, target, prev_ni):
+ if dependency.changed_state(target, prev_ni):
+ return 1
+ return self.decide_source(dependency, target, prev_ni)
+
+ def _changed_content(self, dependency, target, prev_ni):
+ return dependency.changed_content(target, prev_ni)
+
+ def _changed_source(self, dependency, target, prev_ni):
+ target_env = dependency.get_build_env()
+ type = target_env.get_tgt_sig_type()
+ if type == 'source':
+ return target_env.decide_source(dependency, target, prev_ni)
+ else:
+ return target_env.decide_target(dependency, target, prev_ni)
+
+ def _changed_timestamp_then_content(self, dependency, target, prev_ni):
+ return dependency.changed_timestamp_then_content(target, prev_ni)
+
+ def _changed_timestamp_newer(self, dependency, target, prev_ni):
+ return dependency.changed_timestamp_newer(target, prev_ni)
+
+ def _changed_timestamp_match(self, dependency, target, prev_ni):
+ return dependency.changed_timestamp_match(target, prev_ni)
+
+ def _copy_from_cache(self, src, dst):
+ return self.fs.copy(src, dst)
+
+ def _copy2_from_cache(self, src, dst):
+ return self.fs.copy2(src, dst)
+
+ def Decider(self, function):
+ copy_function = self._copy2_from_cache
+ if function in ('MD5', 'content'):
+ if not SCons.Util.md5:
+ raise UserError, "MD5 signatures are not available in this version of Python."
+ function = self._changed_content
+ elif function == 'MD5-timestamp':
+ function = self._changed_timestamp_then_content
+ elif function in ('timestamp-newer', 'make'):
+ function = self._changed_timestamp_newer
+ copy_function = self._copy_from_cache
+ elif function == 'timestamp-match':
+ function = self._changed_timestamp_match
+ elif not callable(function):
+ raise UserError, "Unknown Decider value %s" % repr(function)
+
+ # We don't use AddMethod because we don't want to turn the
+ # function, which only expects three arguments, into a bound
+ # method, which would add self as an initial, fourth argument.
+ self.decide_target = function
+ self.decide_source = function
+
+ self.copy_from_cache = copy_function
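+
+ # Example (illustrative sketch; not part of the upstream SCons source).
+ # Decider() accepts one of the predefined names above or a callable
+ # taking (dependency, target, prev_ni):
+ #
+ #     env.Decider('MD5-timestamp')
+ #
+ #     def always_rebuild(dependency, target, prev_ni):
+ #         return True
+ #     env.Decider(always_rebuild)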
+
+ def Detect(self, progs):
+ """Return the first available program in progs.
+ """
+ if not SCons.Util.is_List(progs):
+ progs = [ progs ]
+ for prog in progs:
+ path = self.WhereIs(prog)
+ if path: return prog
+ return None
+
+ def Dictionary(self, *args):
+ if not args:
+ return self._dict
+ dlist = map(lambda x, s=self: s._dict[x], args)
+ if len(dlist) == 1:
+ dlist = dlist[0]
+ return dlist
+
+ def Dump(self, key = None):
+ """
+ Using the standard Python pretty printer, dump the contents of the
+ scons build environment to stdout.
+
+ If the key passed in is anything other than None, then that will
+ be used as an index into the build environment dictionary and
+ whatever is found there will be fed into the pretty printer. Note
+ that this key is case sensitive.
+ """
+ import pprint
+ pp = pprint.PrettyPrinter(indent=2)
+ if key:
+ dict = self.Dictionary(key)
+ else:
+ dict = self.Dictionary()
+ return pp.pformat(dict)
+
+ def FindIxes(self, paths, prefix, suffix):
+ """
+ Search a list of paths for something that matches the prefix and suffix.
+
+ paths - the list of paths or nodes.
+ prefix - construction variable for the prefix.
+ suffix - construction variable for the suffix.
+ """
+
+ suffix = self.subst('$'+suffix)
+ prefix = self.subst('$'+prefix)
+
+ for path in paths:
+ dir,name = os.path.split(str(path))
+ if name[:len(prefix)] == prefix and name[-len(suffix):] == suffix:
+ return path
+
+ def ParseConfig(self, command, function=None, unique=1):
+ """
+ Use the specified function to parse the output of the command
+ in order to modify the current environment. The 'command' can
+ be a string or a list of strings representing a command and
+ its arguments. 'Function' is an optional argument that takes
+ the environment, the output of the command, and the unique flag.
+ If no function is specified, MergeFlags, which treats the output
+ as the result of a typical 'X-config' command (i.e. gtk-config),
+ will merge the output into the appropriate variables.
+ """
+ if function is None:
+ def parse_conf(env, cmd, unique=unique):
+ return env.MergeFlags(cmd, unique)
+ function = parse_conf
+ if SCons.Util.is_List(command):
+ command = string.join(command)
+ command = self.subst(command)
+ return function(self, self.backtick(command))
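+
+ # Example (illustrative sketch; not part of the upstream SCons source;
+ # assumes a pkg-config style tool is available on the build machine):
+ #
+ #     env.ParseConfig('pkg-config --cflags --libs libpng')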
+
+ def ParseDepends(self, filename, must_exist=None, only_one=0):
+ """
+ Parse a mkdep-style file for explicit dependencies. This is
+ completely abusable, and should be unnecessary in the "normal"
+ case of proper SCons configuration, but it may help make
+ the transition from a Make hierarchy easier for some people
+ to swallow. It can also be genuinely useful when using a tool
+ that can write a .d file, but for which writing a scanner would
+ be too complicated.
+ """
+ filename = self.subst(filename)
+ try:
+ fp = open(filename, 'r')
+ except IOError:
+ if must_exist:
+ raise
+ return
+ lines = SCons.Util.LogicalLines(fp).readlines()
+ lines = filter(lambda l: l[0] != '#', lines)
+ tdlist = []
+ for line in lines:
+ try:
+ target, depends = string.split(line, ':', 1)
+ except (AttributeError, TypeError, ValueError):
+ # Python 1.5.2 throws TypeError if line isn't a string,
+ # Python 2.x throws AttributeError because it tries
+ # to call line.split(). Either can throw ValueError
+ # if the line doesn't split into two or more elements.
+ pass
+ else:
+ tdlist.append((string.split(target), string.split(depends)))
+ if only_one:
+ targets = reduce(lambda x, y: x+y, map(lambda p: p[0], tdlist))
+ if len(targets) > 1:
+ raise SCons.Errors.UserError, "More than one dependency target found in `%s': %s" % (filename, targets)
+ for target, depends in tdlist:
+ self.Depends(target, depends)
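+
+ # Example (illustrative sketch; not part of the upstream SCons source;
+ # the file name is hypothetical). Given a compiler-generated "foo.d"
+ # containing a line such as
+ #
+ #     foo.o: foo.c foo.h common.h
+ #
+ # the recorded dependencies can be folded into the build with:
+ #
+ #     env.ParseDepends('foo.d')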
+
+ def Platform(self, platform):
+ platform = self.subst(platform)
+ return SCons.Platform.Platform(platform)(self)
+
+ def Prepend(self, **kw):
+ """Prepend values to existing construction variables
+ in an Environment.
+ """
+ kw = copy_non_reserved_keywords(kw)
+ for key, val in kw.items():
+ # It would be easier on the eyes to write this using
+ # "continue" statements whenever we finish processing an item,
+ # but Python 1.5.2 apparently doesn't let you use "continue"
+ # within try:-except: blocks, so we have to nest our code.
+ try:
+ orig = self._dict[key]
+ except KeyError:
+ # No existing variable in the environment, so just set
+ # it to the new value.
+ self._dict[key] = val
+ else:
+ try:
+ # Check if the original looks like a dictionary.
+ # If it is, we can't just try adding the value because
+ # dictionaries don't have __add__() methods, and
+ # things like UserList will incorrectly coerce the
+ # original dict to a list (which we don't want).
+ update_dict = orig.update
+ except AttributeError:
+ try:
+ # Most straightforward: just try to add them
+ # together. This will work in most cases, when the
+ # original and new values are of compatible types.
+ self._dict[key] = val + orig
+ except (KeyError, TypeError):
+ try:
+ # Check if the added value is a list.
+ add_to_val = val.append
+ except AttributeError:
+ # The added value isn't a list, but the
+ # original is (by process of elimination),
+ # so insert the new value in the original
+ # (if there's one to insert).
+ if val:
+ orig.insert(0, val)
+ else:
+ # The added value is a list, so append
+ # the original to it (if there's a value
+ # to append).
+ if orig:
+ add_to_val(orig)
+ self._dict[key] = val
+ else:
+ # The original looks like a dictionary, so update it
+ # based on what we think the value looks like.
+ if SCons.Util.is_List(val):
+ for v in val:
+ orig[v] = None
+ else:
+ try:
+ update_dict(val)
+ except (AttributeError, TypeError, ValueError):
+ if SCons.Util.is_Dict(val):
+ for k, v in val.items():
+ orig[k] = v
+ else:
+ orig[val] = None
+ self.scanner_map_delete(kw)
+
+ def PrependENVPath(self, name, newpath, envname = 'ENV', sep = os.pathsep,
+ delete_existing=1):
+ """Prepend path elements to the path 'name' in the 'ENV'
+ dictionary for this environment. Will only add any particular
+ path once, and will normpath and normcase all paths to help
+ assure this. This can also handle the case where the env
+ variable is a list instead of a string.
+
+ If delete_existing is 0, a newpath which is already in the path
+ will not be moved to the front (it will be left where it is).
+ """
+
+ orig = ''
+ if self._dict.has_key(envname) and self._dict[envname].has_key(name):
+ orig = self._dict[envname][name]
+
+ nv = SCons.Util.PrependPath(orig, newpath, sep, delete_existing)
+
+ if not self._dict.has_key(envname):
+ self._dict[envname] = {}
+
+ self._dict[envname][name] = nv
+
+ def PrependUnique(self, delete_existing=0, **kw):
+ """Prepend values to existing construction variables
+ in an Environment, if they're not already there.
+ If delete_existing is 1, removes existing values first, so
+ values move to front.
+ """
+ kw = copy_non_reserved_keywords(kw)
+ for key, val in kw.items():
+ if SCons.Util.is_List(val):
+ val = _delete_duplicates(val, not delete_existing)
+ if not self._dict.has_key(key) or self._dict[key] in ('', None):
+ self._dict[key] = val
+ elif SCons.Util.is_Dict(self._dict[key]) and \
+ SCons.Util.is_Dict(val):
+ self._dict[key].update(val)
+ elif SCons.Util.is_List(val):
+ dk = self._dict[key]
+ if not SCons.Util.is_List(dk):
+ dk = [dk]
+ if delete_existing:
+ dk = filter(lambda x, val=val: x not in val, dk)
+ else:
+ val = filter(lambda x, dk=dk: x not in dk, val)
+ self._dict[key] = val + dk
+ else:
+ dk = self._dict[key]
+ if SCons.Util.is_List(dk):
+ # By elimination, val is not a list. Since dk is a
+ # list, wrap val in a list first.
+ if delete_existing:
+ dk = filter(lambda x, val=val: x not in val, dk)
+ self._dict[key] = [val] + dk
+ else:
+ if not val in dk:
+ self._dict[key] = [val] + dk
+ else:
+ if delete_existing:
+ dk = filter(lambda x, val=val: x not in val, dk)
+ self._dict[key] = val + dk
+ self.scanner_map_delete(kw)
+
+ def Replace(self, **kw):
+ """Replace existing construction variables in an Environment
+ with new construction variables and/or values.
+ """
+ try:
+ kwbd = kw['BUILDERS']
+ except KeyError:
+ pass
+ else:
+ kwbd = semi_deepcopy(kwbd)
+ del kw['BUILDERS']
+ self.__setitem__('BUILDERS', kwbd)
+ kw = copy_non_reserved_keywords(kw)
+ self._update(semi_deepcopy(kw))
+ self.scanner_map_delete(kw)
+
+ def ReplaceIxes(self, path, old_prefix, old_suffix, new_prefix, new_suffix):
+ """
+ Replace old_prefix with new_prefix and old_suffix with new_suffix.
+
+ env - Environment used to interpolate variables.
+ path - the path that will be modified.
+ old_prefix - construction variable for the old prefix.
+ old_suffix - construction variable for the old suffix.
+ new_prefix - construction variable for the new prefix.
+ new_suffix - construction variable for the new suffix.
+ """
+ old_prefix = self.subst('$'+old_prefix)
+ old_suffix = self.subst('$'+old_suffix)
+
+ new_prefix = self.subst('$'+new_prefix)
+ new_suffix = self.subst('$'+new_suffix)
+
+ dir,name = os.path.split(str(path))
+ if name[:len(old_prefix)] == old_prefix:
+ name = name[len(old_prefix):]
+ if name[-len(old_suffix):] == old_suffix:
+ name = name[:-len(old_suffix)]
+ return os.path.join(dir, new_prefix+name+new_suffix)
+
+ def SetDefault(self, **kw):
+ for k in kw.keys():
+ if self._dict.has_key(k):
+ del kw[k]
+ apply(self.Replace, (), kw)
+
+ def _find_toolpath_dir(self, tp):
+ return self.fs.Dir(self.subst(tp)).srcnode().abspath
+
+ def Tool(self, tool, toolpath=None, **kw):
+ if SCons.Util.is_String(tool):
+ tool = self.subst(tool)
+ if toolpath is None:
+ toolpath = self.get('toolpath', [])
+ toolpath = map(self._find_toolpath_dir, toolpath)
+ tool = apply(SCons.Tool.Tool, (tool, toolpath), kw)
+ tool(self)
+
+ def WhereIs(self, prog, path=None, pathext=None, reject=[]):
+ """Find prog in the path.
+ """
+ if path is None:
+ try:
+ path = self['ENV']['PATH']
+ except KeyError:
+ pass
+ elif SCons.Util.is_String(path):
+ path = self.subst(path)
+ if pathext is None:
+ try:
+ pathext = self['ENV']['PATHEXT']
+ except KeyError:
+ pass
+ elif SCons.Util.is_String(pathext):
+ pathext = self.subst(pathext)
+ prog = self.subst(prog)
+ path = SCons.Util.WhereIs(prog, path, pathext, reject)
+ if path: return path
+ return None
+
+ #######################################################################
+ # Public methods for doing real "SCons stuff" (manipulating
+ # dependencies, setting attributes on targets, etc.). These begin
+ # with upper-case letters. The essential characteristic of methods
+ # in this section is that they all *should* have corresponding
+ # same-named global functions.
+ #######################################################################
+
+ def Action(self, *args, **kw):
+ def subst_string(a, self=self):
+ if SCons.Util.is_String(a):
+ a = self.subst(a)
+ return a
+ nargs = map(subst_string, args)
+ nkw = self.subst_kw(kw)
+ return apply(SCons.Action.Action, nargs, nkw)
+
+ def AddPreAction(self, files, action):
+ nodes = self.arg2nodes(files, self.fs.Entry)
+ action = SCons.Action.Action(action)
+ uniq = {}
+ for executor in map(lambda n: n.get_executor(), nodes):
+ uniq[executor] = 1
+ for executor in uniq.keys():
+ executor.add_pre_action(action)
+ return nodes
+
+ def AddPostAction(self, files, action):
+ nodes = self.arg2nodes(files, self.fs.Entry)
+ action = SCons.Action.Action(action)
+ uniq = {}
+ for executor in map(lambda n: n.get_executor(), nodes):
+ uniq[executor] = 1
+ for executor in uniq.keys():
+ executor.add_post_action(action)
+ return nodes
+
+ def Alias(self, target, source=[], action=None, **kw):
+ tlist = self.arg2nodes(target, self.ans.Alias)
+ if not SCons.Util.is_List(source):
+ source = [source]
+ source = filter(None, source)
+
+ if not action:
+ if not source:
+ # There are no source files and no action, so just
+ # return a target list of classic Alias Nodes, without
+ # any builder. The externally visible effect is that
+ # this will make the wrapping Script.BuildTask class
+ # say that there's "Nothing to be done" for this Alias,
+ # rather than reporting that it is "up to date."
+ return tlist
+
+ # No action, but there are sources. Re-call all the target
+ # builders to add the sources to each target.
+ result = []
+ for t in tlist:
+ bld = t.get_builder(AliasBuilder)
+ result.extend(bld(self, t, source))
+ return result
+
+ nkw = self.subst_kw(kw)
+ nkw.update({
+ 'action' : SCons.Action.Action(action),
+ 'source_factory' : self.fs.Entry,
+ 'multi' : 1,
+ 'is_explicit' : None,
+ })
+ bld = apply(SCons.Builder.Builder, (), nkw)
+
+ # Apply the Builder separately to each target so that the Aliases
+ # stay separate. If we did one "normal" Builder call with the
+ # whole target list, then all of the target Aliases would be
+ # associated under a single Executor.
+ result = []
+ for t in tlist:
+ # Calling the convert() method will cause a new Executor to be
+ # created from scratch, so we have to explicitly initialize
+ # it with the target's existing sources, plus our new ones,
+ # so nothing gets lost.
+ b = t.get_builder()
+ if b is None or b is AliasBuilder:
+ b = bld
+ else:
+ nkw['action'] = b.action + action
+ b = apply(SCons.Builder.Builder, (), nkw)
+ t.convert()
+ result.extend(b(self, t, t.sources + source))
+ return result
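+
+ # Example (illustrative sketch; not part of the upstream SCons source;
+ # the program name and install directory are hypothetical):
+ #
+ #     prog = env.Program('hello', 'hello.c')
+ #     env.Alias('install', env.Install('/usr/local/bin', prog))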
+
+ def AlwaysBuild(self, *targets):
+ tlist = []
+ for t in targets:
+ tlist.extend(self.arg2nodes(t, self.fs.Entry))
+ for t in tlist:
+ t.set_always_build()
+ return tlist
+
+ def BuildDir(self, *args, **kw):
+ if kw.has_key('build_dir'):
+ kw['variant_dir'] = kw['build_dir']
+ del kw['build_dir']
+ return apply(self.VariantDir, args, kw)
+
+ def Builder(self, **kw):
+ nkw = self.subst_kw(kw)
+ return apply(SCons.Builder.Builder, [], nkw)
+
+ def CacheDir(self, path):
+ import SCons.CacheDir
+ if not path is None:
+ path = self.subst(path)
+ self._CacheDir_path = path
+
+ def Clean(self, targets, files):
+ global CleanTargets
+ tlist = self.arg2nodes(targets, self.fs.Entry)
+ flist = self.arg2nodes(files, self.fs.Entry)
+ for t in tlist:
+ try:
+ CleanTargets[t].extend(flist)
+ except KeyError:
+ CleanTargets[t] = flist
+
+ def Configure(self, *args, **kw):
+ nargs = [self]
+ if args:
+ nargs = nargs + self.subst_list(args)[0]
+ nkw = self.subst_kw(kw)
+ nkw['_depth'] = kw.get('_depth', 0) + 1
+ try:
+ nkw['custom_tests'] = self.subst_kw(nkw['custom_tests'])
+ except KeyError:
+ pass
+ return apply(SCons.SConf.SConf, nargs, nkw)
+
+ def Command(self, target, source, action, **kw):
+ """Builds the supplied target files from the supplied
+ source files using the supplied action. Action may
+ be any type that the Builder constructor will accept
+ for an action."""
+ bkw = {
+ 'action' : action,
+ 'target_factory' : self.fs.Entry,
+ 'source_factory' : self.fs.Entry,
+ }
+ try: bkw['source_scanner'] = kw['source_scanner']
+ except KeyError: pass
+ else: del kw['source_scanner']
+ bld = apply(SCons.Builder.Builder, (), bkw)
+ return apply(bld, (self, target, source), kw)
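+
+ # Example (illustrative sketch; not part of the upstream SCons source;
+ # the file names are hypothetical and the action assumes a POSIX shell):
+ #
+ #     env.Command('version.h', 'version.h.in',
+ #                 'sed "s/@VERSION@/1.0/" $SOURCE > $TARGET')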
+
+ def Depends(self, target, dependency):
+ """Explicity specify that 'target's depend on 'dependency'."""
+ tlist = self.arg2nodes(target, self.fs.Entry)
+ dlist = self.arg2nodes(dependency, self.fs.Entry)
+ for t in tlist:
+ t.add_dependency(dlist)
+ return tlist
+
+ def Dir(self, name, *args, **kw):
+ """
+ """
+ s = self.subst(name)
+ if SCons.Util.is_Sequence(s):
+ result=[]
+ for e in s:
+ result.append(apply(self.fs.Dir, (e,) + args, kw))
+ return result
+ return apply(self.fs.Dir, (s,) + args, kw)
+
+ def NoClean(self, *targets):
+ """Tags a target so that it will not be cleaned by -c"""
+ tlist = []
+ for t in targets:
+ tlist.extend(self.arg2nodes(t, self.fs.Entry))
+ for t in tlist:
+ t.set_noclean()
+ return tlist
+
+ def NoCache(self, *targets):
+ """Tags a target so that it will not be cached"""
+ tlist = []
+ for t in targets:
+ tlist.extend(self.arg2nodes(t, self.fs.Entry))
+ for t in tlist:
+ t.set_nocache()
+ return tlist
+
+ def Entry(self, name, *args, **kw):
+ """
+ """
+ s = self.subst(name)
+ if SCons.Util.is_Sequence(s):
+ result=[]
+ for e in s:
+ result.append(apply(self.fs.Entry, (e,) + args, kw))
+ return result
+ return apply(self.fs.Entry, (s,) + args, kw)
+
+ def Environment(self, **kw):
+ return apply(SCons.Environment.Environment, [], self.subst_kw(kw))
+
+ def Execute(self, action, *args, **kw):
+ """Directly execute an action through an Environment
+ """
+ action = apply(self.Action, (action,) + args, kw)
+ result = action([], [], self)
+ if isinstance(result, SCons.Errors.BuildError):
+ errstr = result.errstr
+ if result.filename:
+ errstr = result.filename + ': ' + errstr
+ sys.stderr.write("scons: *** %s\n" % errstr)
+ return result.status
+ else:
+ return result
+
+ def File(self, name, *args, **kw):
+ """
+ """
+ s = self.subst(name)
+ if SCons.Util.is_Sequence(s):
+ result=[]
+ for e in s:
+ result.append(apply(self.fs.File, (e,) + args, kw))
+ return result
+ return apply(self.fs.File, (s,) + args, kw)
+
+ def FindFile(self, file, dirs):
+ file = self.subst(file)
+ nodes = self.arg2nodes(dirs, self.fs.Dir)
+ return SCons.Node.FS.find_file(file, tuple(nodes))
+
+ def Flatten(self, sequence):
+ return SCons.Util.flatten(sequence)
+
+ def GetBuildPath(self, files):
+ result = map(str, self.arg2nodes(files, self.fs.Entry))
+ if SCons.Util.is_List(files):
+ return result
+ else:
+ return result[0]
+
+ def Glob(self, pattern, ondisk=True, source=False, strings=False):
+ return self.fs.Glob(self.subst(pattern), ondisk, source, strings)
+
+ def Ignore(self, target, dependency):
+ """Ignore a dependency."""
+ tlist = self.arg2nodes(target, self.fs.Entry)
+ dlist = self.arg2nodes(dependency, self.fs.Entry)
+ for t in tlist:
+ t.add_ignore(dlist)
+ return tlist
+
+ def Literal(self, string):
+ return SCons.Subst.Literal(string)
+
+ def Local(self, *targets):
+ ret = []
+ for targ in targets:
+ if isinstance(targ, SCons.Node.Node):
+ targ.set_local()
+ ret.append(targ)
+ else:
+ for t in self.arg2nodes(targ, self.fs.Entry):
+ t.set_local()
+ ret.append(t)
+ return ret
+
+ def Precious(self, *targets):
+ tlist = []
+ for t in targets:
+ tlist.extend(self.arg2nodes(t, self.fs.Entry))
+ for t in tlist:
+ t.set_precious()
+ return tlist
+
+ def Repository(self, *dirs, **kw):
+ dirs = self.arg2nodes(list(dirs), self.fs.Dir)
+ apply(self.fs.Repository, dirs, kw)
+
+ def Requires(self, target, prerequisite):
+ """Specify that 'prerequisite' must be built before 'target',
+ (but 'target' does not actually depend on 'prerequisite'
+ and need not be rebuilt if it changes)."""
+ tlist = self.arg2nodes(target, self.fs.Entry)
+ plist = self.arg2nodes(prerequisite, self.fs.Entry)
+ for t in tlist:
+ t.add_prerequisite(plist)
+ return tlist
+
+ def Scanner(self, *args, **kw):
+ nargs = []
+ for arg in args:
+ if SCons.Util.is_String(arg):
+ arg = self.subst(arg)
+ nargs.append(arg)
+ nkw = self.subst_kw(kw)
+ return apply(SCons.Scanner.Base, nargs, nkw)
+
+ def SConsignFile(self, name=".sconsign", dbm_module=None):
+ if not name is None:
+ name = self.subst(name)
+ if not os.path.isabs(name):
+ name = os.path.join(str(self.fs.SConstruct_dir), name)
+ if name:
+ name = os.path.normpath(name)
+ sconsign_dir = os.path.dirname(name)
+ if sconsign_dir and not os.path.exists(sconsign_dir):
+ self.Execute(SCons.Defaults.Mkdir(sconsign_dir))
+ SCons.SConsign.File(name, dbm_module)
+
+ def SideEffect(self, side_effect, target):
+ """Tell scons that side_effects are built as side
+ effects of building targets."""
+ side_effects = self.arg2nodes(side_effect, self.fs.Entry)
+ targets = self.arg2nodes(target, self.fs.Entry)
+
+ for side_effect in side_effects:
+ if side_effect.multiple_side_effect_has_builder():
+ raise SCons.Errors.UserError, "Multiple ways to build the same target were specified for: %s" % str(side_effect)
+ side_effect.add_source(targets)
+ side_effect.side_effect = 1
+ self.Precious(side_effect)
+ for target in targets:
+ target.side_effects.append(side_effect)
+ return side_effects
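+
+ # Example (illustrative sketch; not part of the upstream SCons source;
+ # "foogen" and the file names are hypothetical). A command that also
+ # writes a log file as a by-product can be modeled as:
+ #
+ #     out = env.Command('foo.out', 'foo.in', 'foogen $SOURCE $TARGET')
+ #     env.SideEffect('foo.log', out)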
+
+ def SourceCode(self, entry, builder):
+ """Arrange for a source code builder for (part of) a tree."""
+ entries = self.arg2nodes(entry, self.fs.Entry)
+ for entry in entries:
+ entry.set_src_builder(builder)
+ return entries
+
+ def SourceSignatures(self, type):
+ global _warn_source_signatures_deprecated
+ if _warn_source_signatures_deprecated:
+ msg = "The env.SourceSignatures() method is deprecated;\n" + \
+ "\tconvert your build to use the env.Decider() method instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedSourceSignaturesWarning, msg)
+ _warn_source_signatures_deprecated = False
+ type = self.subst(type)
+ self.src_sig_type = type
+ if type == 'MD5':
+ if not SCons.Util.md5:
+ raise UserError, "MD5 signatures are not available in this version of Python."
+ self.decide_source = self._changed_content
+ elif type == 'timestamp':
+ self.decide_source = self._changed_timestamp_match
+ else:
+ raise UserError, "Unknown source signature type '%s'" % type
+
+ def Split(self, arg):
+ """This function converts a string or list into a list of strings
+ or Nodes. This makes things easier for users by allowing files to
+ be specified as a white-space separated list to be split.
+ The input rules are:
+ - A single string containing names separated by spaces. These will be
+ split apart at the spaces.
+ - A single Node instance
+ - A list containing either strings or Node instances. Any strings
+ in the list are not split at spaces.
+ In all cases, the function returns a list of Nodes and strings."""
+ if SCons.Util.is_List(arg):
+ return map(self.subst, arg)
+ elif SCons.Util.is_String(arg):
+ return string.split(self.subst(arg))
+ else:
+ return [self.subst(arg)]
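+
+ # Example (illustrative sketch; not part of the upstream SCons source):
+ #
+ #     env.Split('a.c b.c c.c')       # -> ['a.c', 'b.c', 'c.c']
+ #     env.Split(['a.c', 'b c.c'])    # strings inside a list are not re-split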
+
+ def TargetSignatures(self, type):
+ global _warn_target_signatures_deprecated
+ if _warn_target_signatures_deprecated:
+ msg = "The env.TargetSignatures() method is deprecated;\n" + \
+ "\tconvert your build to use the env.Decider() method instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedTargetSignaturesWarning, msg)
+ _warn_target_signatures_deprecated = False
+ type = self.subst(type)
+ self.tgt_sig_type = type
+ if type in ('MD5', 'content'):
+ if not SCons.Util.md5:
+ raise UserError, "MD5 signatures are not available in this version of Python."
+ self.decide_target = self._changed_content
+ elif type == 'timestamp':
+ self.decide_target = self._changed_timestamp_match
+ elif type == 'build':
+ self.decide_target = self._changed_build
+ elif type == 'source':
+ self.decide_target = self._changed_source
+ else:
+ raise UserError, "Unknown target signature type '%s'"%type
+
+ def Value(self, value, built_value=None):
+ """
+ """
+ return SCons.Node.Python.Value(value, built_value)
+
+ def VariantDir(self, variant_dir, src_dir, duplicate=1):
+ variant_dir = self.arg2nodes(variant_dir, self.fs.Dir)[0]
+ src_dir = self.arg2nodes(src_dir, self.fs.Dir)[0]
+ self.fs.VariantDir(variant_dir, src_dir, duplicate)
+
+ def FindSourceFiles(self, node='.'):
+ """ returns a list of all source files.
+ """
+ node = self.arg2nodes(node, self.fs.Entry)[0]
+
+ sources = []
+ # Uncomment this and get rid of the global definition when we
+ # drop support for pre-2.2 Python versions.
+ #def build_source(ss, result):
+ # for s in ss:
+ # if isinstance(s, SCons.Node.FS.Dir):
+ # build_source(s.all_children(), result)
+ # elif s.has_builder():
+ # build_source(s.sources, result)
+ # elif isinstance(s.disambiguate(), SCons.Node.FS.File):
+ # result.append(s)
+ build_source(node.all_children(), sources)
+
+ # now strip the build_node from the sources by calling the srcnode
+ # function
+ def get_final_srcnode(file):
+ srcnode = file.srcnode()
+ while srcnode != srcnode.srcnode():
+ srcnode = srcnode.srcnode()
+ return srcnode
+
+ # get the final srcnode for all nodes, this means stripping any
+ # attached build node.
+ sources = map( get_final_srcnode, sources )
+
+ # remove duplicates
+ return list(set(sources))
+
+ def FindInstalledFiles(self):
+ """ returns the list of all targets of the Install and InstallAs Builder.
+ """
+ from SCons.Tool import install
+ if install._UNIQUE_INSTALLED_FILES is None:
+ install._UNIQUE_INSTALLED_FILES = SCons.Util.uniquer_hashables(install._INSTALLED_FILES)
+ return install._UNIQUE_INSTALLED_FILES
+
+class OverrideEnvironment(Base):
+ """A proxy that overrides variables in a wrapped construction
+ environment by returning values from an overrides dictionary in
+ preference to values from the underlying subject environment.
+
+ This is a lightweight (I hope) proxy that passes through most use of
+ attributes to the underlying Environment.Base class, but has just
+ enough additional methods defined to act like a real construction
+ environment with overridden values. It can wrap either a Base
+ construction environment, or another OverrideEnvironment, which
+ can in turn nest arbitrary OverrideEnvironments...
+
+ Note that we do *not* call the underlying base class
+ (SubstitutionEnvironment) initialization, because we get most of those
+ from proxying the attributes of the subject construction environment.
+ But because we subclass SubstitutionEnvironment, this class also
+ has inherited arg2nodes() and subst*() methods; those methods can't
+ be proxied because they need *this* object's methods to fetch the
+ values from the overrides dictionary.
+ """
+
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ def __init__(self, subject, overrides={}):
+ if __debug__: logInstanceCreation(self, 'Environment.OverrideEnvironment')
+ self.__dict__['__subject'] = subject
+ self.__dict__['overrides'] = overrides
+
+ # Methods that make this class act like a proxy.
+ def __getattr__(self, name):
+ return getattr(self.__dict__['__subject'], name)
+ def __setattr__(self, name, value):
+ setattr(self.__dict__['__subject'], name, value)
+
+ # Methods that make this class act like a dictionary.
+ def __getitem__(self, key):
+ try:
+ return self.__dict__['overrides'][key]
+ except KeyError:
+ return self.__dict__['__subject'].__getitem__(key)
+ def __setitem__(self, key, value):
+ if not is_valid_construction_var(key):
+ raise SCons.Errors.UserError, "Illegal construction variable `%s'" % key
+ self.__dict__['overrides'][key] = value
+ def __delitem__(self, key):
+ try:
+ del self.__dict__['overrides'][key]
+ except KeyError:
+ deleted = 0
+ else:
+ deleted = 1
+ try:
+ result = self.__dict__['__subject'].__delitem__(key)
+ except KeyError:
+ if not deleted:
+ raise
+ result = None
+ return result
+ def get(self, key, default=None):
+ """Emulates the get() method of dictionaries."""
+ try:
+ return self.__dict__['overrides'][key]
+ except KeyError:
+ return self.__dict__['__subject'].get(key, default)
+ def has_key(self, key):
+ try:
+ self.__dict__['overrides'][key]
+ return 1
+ except KeyError:
+ return self.__dict__['__subject'].has_key(key)
+ def __contains__(self, key):
+ if self.__dict__['overrides'].__contains__(key):
+ return 1
+ return self.__dict__['__subject'].__contains__(key)
+ def Dictionary(self):
+ """Emulates the items() method of dictionaries."""
+ d = self.__dict__['__subject'].Dictionary().copy()
+ d.update(self.__dict__['overrides'])
+ return d
+ def items(self):
+ """Emulates the items() method of dictionaries."""
+ return self.Dictionary().items()
+
+ # Overridden private construction environment methods.
+ def _update(self, dict):
+ """Update an environment's values directly, bypassing the normal
+ checks that occur when users try to set items.
+ """
+ self.__dict__['overrides'].update(dict)
+
+ def gvars(self):
+ return self.__dict__['__subject'].gvars()
+
+ def lvars(self):
+ lvars = self.__dict__['__subject'].lvars()
+ lvars.update(self.__dict__['overrides'])
+ return lvars
+
+ # Overridden public construction environment methods.
+ def Replace(self, **kw):
+ kw = copy_non_reserved_keywords(kw)
+ self.__dict__['overrides'].update(semi_deepcopy(kw))
+
+# The entry point that will be used by the external world
+# to refer to a construction environment. This allows the wrapper
+# interface to extend a construction environment for its own purposes
+# by subclassing SCons.Environment.Base and then assigning the
+# class to SCons.Environment.Environment.
+
+Environment = Base
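+
+# Example (illustrative sketch; not part of the upstream SCons source).
+# A wrapper interface could extend the construction environment like so:
+#
+#     import SCons.Environment
+#
+#     class MyEnvironment(SCons.Environment.Base):
+#         def Banner(self, text):              # hypothetical helper method
+#             print "***", text
+#
+#     SCons.Environment.Environment = MyEnvironment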
+
+# An entry point for returning a proxy subclass instance that overrides
+# the subst*() methods so they don't actually perform construction
+# variable substitution. This is specifically intended to be the shim
+# layer in between global function calls (which don't want construction
+# variable substitution) and the DefaultEnvironment() (which would
+ # substitute variables if left to its own devices).
+#
+# We have to wrap this in a function that allows us to delay definition of
+# the class until it's necessary, so that when it subclasses Environment
+# it will pick up whatever Environment subclass the wrapper interface
+# might have assigned to SCons.Environment.Environment.
+
+def NoSubstitutionProxy(subject):
+ class _NoSubstitutionProxy(Environment):
+ def __init__(self, subject):
+ self.__dict__['__subject'] = subject
+ def __getattr__(self, name):
+ return getattr(self.__dict__['__subject'], name)
+ def __setattr__(self, name, value):
+ return setattr(self.__dict__['__subject'], name, value)
+ def raw_to_mode(self, dict):
+ try:
+ raw = dict['raw']
+ except KeyError:
+ pass
+ else:
+ del dict['raw']
+ dict['mode'] = raw
+ def subst(self, string, *args, **kwargs):
+ return string
+ def subst_kw(self, kw, *args, **kwargs):
+ return kw
+ def subst_list(self, string, *args, **kwargs):
+ nargs = (string, self,) + args
+ nkw = kwargs.copy()
+ nkw['gvars'] = {}
+ self.raw_to_mode(nkw)
+ return apply(SCons.Subst.scons_subst_list, nargs, nkw)
+ def subst_target_source(self, string, *args, **kwargs):
+ nargs = (string, self,) + args
+ nkw = kwargs.copy()
+ nkw['gvars'] = {}
+ self.raw_to_mode(nkw)
+ return apply(SCons.Subst.scons_subst, nargs, nkw)
+ return _NoSubstitutionProxy(subject)
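+
+# Example (illustrative sketch; not part of the upstream SCons source).
+# The proxy leaves construction variables unexpanded:
+#
+#     proxy = NoSubstitutionProxy(env)
+#     proxy.subst('$CCFLAGS $SOURCES')    # returned verbatim, no expansion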
diff --git a/tools/scons/scons-local-1.2.0/SCons/Errors.py b/tools/scons/scons-local-1.2.0/SCons/Errors.py
new file mode 100644
index 000000000..8369873c7
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Errors.py
@@ -0,0 +1,198 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+"""SCons.Errors
+
+This file contains the exception classes used to handle internal
+and user errors in SCons.
+
+"""
+
+__revision__ = "src/engine/SCons/Errors.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+import exceptions
+
+class BuildError(Exception):
+ """ Errors occuring while building.
+
+ BuildError have the following attributes:
+
+ Information about the cause of the build error:
+ -----------------------------------------------
+
+ errstr : a description of the error message
+
+ status : the return code of the action that caused the build
+ error. Must be set to a non-zero value even if the
+ build error is not due to an action returning a
+ non-zero return code.
+
+ exitstatus : SCons exit status due to this build error.
+ Must be nonzero unless due to an explicit Exit()
+ call. Not always the same as status, since
+ actions return a status code that should be
+ respected, but SCons typically exits with 2
+ irrespective of the return value of the failed
+ action.
+
+ filename : The name of the file or directory that caused the
+ build error. Set to None if the error is not associated
+ with a particular file. The name might also differ from
+ the target being built; for example, it may be a directory
+ that could not be created to hold the target file.
+
+ exc_info : Info about exception that caused the build
+ error. Set to (None, None, None) if this build
+ error is not due to an exception.
+
+
+ Information about the location of the error:
+ ---------------------------------------------------------
+
+ node : the target node(s) being built when the error occurred
+
+ executor : the executor that caused the build to fail (might
+ be None if the build failure is not due to the
+ executor failing)
+
+ action : the action that caused the build to fail (might be
+ None if the build failure is not due to an
+ action failure)
+
+ command : the command line for the action that caused the
+ build to fail (might be None if the build failure
+ is not due to an action failure)
+ """
+
+ def __init__(self,
+ node=None, errstr="Unknown error", status=2, exitstatus=2,
+ filename=None, executor=None, action=None, command=None,
+ exc_info=(None, None, None)):
+
+ self.errstr = errstr
+ self.status = status
+ self.exitstatus = exitstatus
+ self.filename = filename
+ self.exc_info = exc_info
+
+ self.node = node
+ self.executor = executor
+ self.action = action
+ self.command = command
+
+ Exception.__init__(self, node, errstr, status, exitstatus, filename,
+ executor, action, command, exc_info)
+
+ def __str__(self):
+ if self.filename:
+ return self.filename + ': ' + self.errstr
+ else:
+ return self.errstr
+
+class InternalError(Exception):
+ pass
+
+class UserError(Exception):
+ pass
+
+class StopError(Exception):
+ pass
+
+class EnvironmentError(Exception):
+ pass
+
+class ExplicitExit(Exception):
+ def __init__(self, node=None, status=None, *args):
+ self.node = node
+ self.status = status
+ self.exitstatus = status
+ apply(Exception.__init__, (self,) + args)
+
+def convert_to_BuildError(status, exc_info=None):
+ """
+ Convert any return code to a BuildError exception.
+
+ `status' can either be a return code or an Exception.
+ The buildError.status we set here will normally be
+ used as the exit status of the "scons" process.
+ """
+ if not exc_info and isinstance(status, Exception):
+ exc_info = (status.__class__, status, None)
+
+ if isinstance(status, BuildError):
+ buildError = status
+ buildError.exitstatus = 2 # always exit with 2 on build errors
+ elif isinstance(status, ExplicitExit):
+ status = status.status
+ errstr = 'Explicit exit, status %s' % status
+ buildError = BuildError(
+ errstr=errstr,
+ status=status, # might be 0, OK here
+ exitstatus=status, # might be 0, OK here
+ exc_info=exc_info)
+ # TODO(1.5):
+ #elif isinstance(status, (StopError, UserError)):
+ elif isinstance(status, StopError) or isinstance(status, UserError):
+ buildError = BuildError(
+ errstr=str(status),
+ status=2,
+ exitstatus=2,
+ exc_info=exc_info)
+ elif isinstance(status, exceptions.EnvironmentError):
+ # If an IOError/OSError happens, raise a BuildError.
+ # Report the name of the file or directory that caused the
+ # error, which might be different from the target being built
+ # (for example, failure to create the directory in which the
+ # target file will appear).
+ try: filename = status.filename
+ except AttributeError: filename = None
+ buildError = BuildError(
+ errstr=status.strerror,
+ status=status.errno,
+ exitstatus=2,
+ filename=filename,
+ exc_info=exc_info)
+ elif isinstance(status, Exception):
+ buildError = BuildError(
+ errstr='%s : %s' % (status.__class__.__name__, status),
+ status=2,
+ exitstatus=2,
+ exc_info=exc_info)
+ elif SCons.Util.is_String(status):
+ buildError = BuildError(
+ errstr=status,
+ status=2,
+ exitstatus=2)
+ else:
+ buildError = BuildError(
+ errstr="Error %s" % status,
+ status=status,
+ exitstatus=2)
+
+ #import sys
+ #sys.stderr.write("convert_to_BuildError: status %s => (errstr %s, status %s)"%(status,buildError.errstr, buildError.status))
+ return buildError
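+
+# Example (illustrative sketch; not part of the upstream SCons source).
+# Whatever status an action produces collapses to a BuildError with a
+# usable exit status:
+#
+#     convert_to_BuildError(1).exitstatus                    # -> 2
+#     convert_to_BuildError(OSError(2, 'not found')).errstr  # -> 'not found'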
diff --git a/tools/scons/scons-local-1.2.0/SCons/Executor.py b/tools/scons/scons-local-1.2.0/SCons/Executor.py
new file mode 100644
index 000000000..a37da0719
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Executor.py
@@ -0,0 +1,393 @@
+"""SCons.Executor
+
+A module for executing actions with specific lists of target and source
+Nodes.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Executor.py 3842 2008/12/20 22:59:52 scons"
+
+import string
+
+from SCons.Debug import logInstanceCreation
+import SCons.Errors
+import SCons.Memoize
+
+
+class Executor:
+ """A class for controlling instances of executing an action.
+
+ This largely exists to hold a single association of an action,
+ environment, list of environment override dictionaries, targets
+ and sources for later processing as needed.
+ """
+
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ memoizer_counters = []
+
+ def __init__(self, action, env=None, overridelist=[{}],
+ targets=[], sources=[], builder_kw={}):
+ if __debug__: logInstanceCreation(self, 'Executor.Executor')
+ self.set_action_list(action)
+ self.pre_actions = []
+ self.post_actions = []
+ self.env = env
+ self.overridelist = overridelist
+ self.targets = targets
+ self.sources = sources[:]
+ self.sources_need_sorting = False
+ self.builder_kw = builder_kw
+ self._memo = {}
+
+ def set_action_list(self, action):
+ import SCons.Util
+ if not SCons.Util.is_List(action):
+ if not action:
+ import SCons.Errors
+ raise SCons.Errors.UserError, "Executor must have an action."
+ action = [action]
+ self.action_list = action
+
+ def get_action_list(self):
+ return self.pre_actions + self.action_list + self.post_actions
+
+ memoizer_counters.append(SCons.Memoize.CountValue('get_build_env'))
+
+ def get_build_env(self):
+ """Fetch or create the appropriate build Environment
+ for this Executor.
+ """
+ try:
+ return self._memo['get_build_env']
+ except KeyError:
+ pass
+
+ # Create the build environment instance with appropriate
+ # overrides. These get evaluated against the current
+ # environment's construction variables so that users can
+ # add to existing values by referencing the variable in
+ # the expansion.
+ overrides = {}
+ for odict in self.overridelist:
+ overrides.update(odict)
+
+ import SCons.Defaults
+ env = self.env or SCons.Defaults.DefaultEnvironment()
+ build_env = env.Override(overrides)
+
+ self._memo['get_build_env'] = build_env
+
+ return build_env
+
+ def get_build_scanner_path(self, scanner):
+ """Fetch the scanner path for this executor's targets and sources.
+ """
+ env = self.get_build_env()
+ try:
+ cwd = self.targets[0].cwd
+ except (IndexError, AttributeError):
+ cwd = None
+ return scanner.path(env, cwd, self.targets, self.get_sources())
+
+ def get_kw(self, kw={}):
+ result = self.builder_kw.copy()
+ result.update(kw)
+ return result
+
+ def do_nothing(self, target, kw):
+ return 0
+
+ def do_execute(self, target, kw):
+ """Actually execute the action list."""
+ env = self.get_build_env()
+ kw = self.get_kw(kw)
+ status = 0
+ for act in self.get_action_list():
+ status = apply(act, (self.targets, self.get_sources(), env), kw)
+ if isinstance(status, SCons.Errors.BuildError):
+ status.executor = self
+ raise status
+ elif status:
+ msg = "Error %s" % status
+ raise SCons.Errors.BuildError(
+ errstr=msg,
+ node=self.targets,
+ executor=self,
+ action=act)
+ return status
+
+ # use extra indirection because with new-style objects (Python 2.2
+ # and above) we can't override special methods, and nullify() needs
+ # to be able to do this.
+
+ def __call__(self, target, **kw):
+ return self.do_execute(target, kw)
+
+ def cleanup(self):
+ self._memo = {}
+
+ def add_sources(self, sources):
+ """Add source files to this Executor's list. This is necessary
+ for "multi" Builders that can be called repeatedly to build up
+ a source file list for a given target."""
+ self.sources.extend(sources)
+ self.sources_need_sorting = True
+
+ def get_sources(self):
+ if self.sources_need_sorting:
+ self.sources = SCons.Util.uniquer_hashables(self.sources)
+ self.sources_need_sorting = False
+ return self.sources
+
+ def prepare(self):
+ """
+ Preparatory checks for whether this Executor can go ahead
+ and (try to) build its targets.
+ """
+ for s in self.get_sources():
+ if s.missing():
+ msg = "Source `%s' not found, needed by target `%s'."
+ raise SCons.Errors.StopError, msg % (s, self.targets[0])
+
+ def add_pre_action(self, action):
+ self.pre_actions.append(action)
+
+ def add_post_action(self, action):
+ self.post_actions.append(action)
+
+ # another extra indirection for new-style objects and nullify...
+
+ def my_str(self):
+ env = self.get_build_env()
+ get = lambda action, t=self.targets, s=self.get_sources(), e=env: \
+ action.genstring(t, s, e)
+ return string.join(map(get, self.get_action_list()), "\n")
+
+
+ def __str__(self):
+ return self.my_str()
+
+ def nullify(self):
+ self.cleanup()
+ self.do_execute = self.do_nothing
+ self.my_str = lambda S=self: ''
+
+ memoizer_counters.append(SCons.Memoize.CountValue('get_contents'))
+
+ def get_contents(self):
+ """Fetch the signature contents. This is the main reason this
+ class exists, so we can compute this once and cache it regardless
+ of how many target or source Nodes there are.
+ """
+ try:
+ return self._memo['get_contents']
+ except KeyError:
+ pass
+ env = self.get_build_env()
+ get = lambda action, t=self.targets, s=self.get_sources(), e=env: \
+ action.get_contents(t, s, e)
+ result = string.join(map(get, self.get_action_list()), "")
+ self._memo['get_contents'] = result
+ return result
+
+ def get_timestamp(self):
+ """Fetch a time stamp for this Executor. We don't have one, of
+ course (only files do), but this is the interface used by the
+ timestamp module.
+ """
+ return 0
+
+ def scan_targets(self, scanner):
+ self.scan(scanner, self.targets)
+
+ def scan_sources(self, scanner):
+ if self.sources:
+ self.scan(scanner, self.get_sources())
+
+ def scan(self, scanner, node_list):
+ """Scan a list of this Executor's files (targets or sources) for
+ implicit dependencies and update all of the targets with them.
+ This essentially short-circuits an N*M scan of the sources for
+ each individual target, which is a hell of a lot more efficient.
+ """
+ env = self.get_build_env()
+
+ deps = []
+ if scanner:
+ for node in node_list:
+ node.disambiguate()
+ s = scanner.select(node)
+ if not s:
+ continue
+ path = self.get_build_scanner_path(s)
+ deps.extend(node.get_implicit_deps(env, s, path))
+ else:
+ kw = self.get_kw()
+ for node in node_list:
+ node.disambiguate()
+ scanner = node.get_env_scanner(env, kw)
+ if not scanner:
+ continue
+ scanner = scanner.select(node)
+ if not scanner:
+ continue
+ path = self.get_build_scanner_path(scanner)
+ deps.extend(node.get_implicit_deps(env, scanner, path))
+
+ deps.extend(self.get_implicit_deps())
+
+ for tgt in self.targets:
+ tgt.add_to_implicit(deps)
+
+ def _get_unignored_sources_key(self, ignore=()):
+ return tuple(ignore)
+
+ memoizer_counters.append(SCons.Memoize.CountDict('get_unignored_sources', _get_unignored_sources_key))
+
+ def get_unignored_sources(self, ignore=()):
+ ignore = tuple(ignore)
+ try:
+ memo_dict = self._memo['get_unignored_sources']
+ except KeyError:
+ memo_dict = {}
+ self._memo['get_unignored_sources'] = memo_dict
+ else:
+ try:
+ return memo_dict[ignore]
+ except KeyError:
+ pass
+
+ sourcelist = self.get_sources()
+ if ignore:
+ idict = {}
+ for i in ignore:
+ idict[i] = 1
+ sourcelist = filter(lambda s, i=idict: not i.has_key(s), sourcelist)
+
+ memo_dict[ignore] = sourcelist
+
+ return sourcelist
+
+ def _process_sources_key(self, func, ignore=()):
+ return (func, tuple(ignore))
+
+ memoizer_counters.append(SCons.Memoize.CountDict('process_sources', _process_sources_key))
+
+ def process_sources(self, func, ignore=()):
+ memo_key = (func, tuple(ignore))
+ try:
+ memo_dict = self._memo['process_sources']
+ except KeyError:
+ memo_dict = {}
+ self._memo['process_sources'] = memo_dict
+ else:
+ try:
+ return memo_dict[memo_key]
+ except KeyError:
+ pass
+
+ result = map(func, self.get_unignored_sources(ignore))
+
+ memo_dict[memo_key] = result
+
+ return result
+
+ def get_implicit_deps(self):
+ """Return the executor's implicit dependencies, i.e. the nodes of
+ the commands to be executed."""
+ result = []
+ build_env = self.get_build_env()
+ for act in self.get_action_list():
+ result.extend(act.get_implicit_deps(self.targets, self.get_sources(), build_env))
+ return result
+
+nullenv = None
+
+def get_NullEnvironment():
+ """Use singleton pattern for Null Environments."""
+ global nullenv
+
+ import SCons.Util
+ class NullEnvironment(SCons.Util.Null):
+ import SCons.CacheDir
+ _CacheDir_path = None
+ _CacheDir = SCons.CacheDir.CacheDir(None)
+ def get_CacheDir(self):
+ return self._CacheDir
+
+ if not nullenv:
+ nullenv = NullEnvironment()
+ return nullenv
+
+class Null:
+ """A null Executor, with a null build Environment, that does
+ nothing when the rest of the methods call it.
+
+ This might be able to disappear when we refactor things to
+ disassociate Builders from Nodes entirely, so we're not
+ going to worry about unit tests for this--at least for now.
+ """
+ def __init__(self, *args, **kw):
+ if __debug__: logInstanceCreation(self, 'Executor.Null')
+ self.targets = kw['targets']
+ def get_build_env(self):
+ return get_NullEnvironment()
+ def get_build_scanner_path(self):
+ return None
+ def cleanup(self):
+ pass
+ def prepare(self):
+ pass
+ def get_unignored_sources(self, *args, **kw):
+ return tuple(())
+ def get_action_list(self):
+ return []
+ def __call__(self, *args, **kw):
+ return 0
+ def get_contents(self):
+ return ''
+
+ def _morph(self):
+ """Morph this Null executor to a real Executor object."""
+ self.__class__ = Executor
+ self.__init__([], targets=self.targets)
+
+ # The following methods require morphing this Null Executor to a
+ # real Executor object.
+
+ def add_pre_action(self, action):
+ self._morph()
+ self.add_pre_action(action)
+ def add_post_action(self, action):
+ self._morph()
+ self.add_post_action(action)
+ def set_action_list(self, action):
+ self._morph()
+ self.set_action_list(action)
+
+
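The Null executor above answers cheap queries itself and rewrites its own __class__ the first time real work (a pre-action, post-action, or action list) is attached. Below is a minimal, self-contained sketch of that "morphing null object" idiom, using hypothetical Widget/NullWidget classes that are not part of SCons.

# A placeholder object that becomes a real one in place on first real use.
class Widget(object):
    def __init__(self, actions=None):
        self.actions = list(actions or [])

    def add_action(self, action):
        self.actions.append(action)

    def describe(self):
        return "Widget with %d action(s)" % len(self.actions)

class NullWidget(Widget):
    def __init__(self):
        # Deliberately skip Widget.__init__ to stay as cheap as possible.
        pass

    def describe(self):
        return "null widget"

    def _morph(self):
        # Become a real Widget in place, then run its initializer.
        self.__class__ = Widget
        self.__init__()

    def add_action(self, action):
        self._morph()
        self.add_action(action)   # now dispatches to Widget.add_action

if __name__ == "__main__":
    w = NullWidget()
    print(w.describe())           # -> null widget
    w.add_action("compile")
    print(w.describe())           # -> Widget with 1 action(s)

The do_execute/my_str indirection commented on in the Executor code serves a related purpose: with new-style classes, special methods such as __call__ and __str__ are looked up on the class rather than the instance, so nullify() instead replaces ordinary attributes that those special methods delegate to.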
diff --git a/tools/scons/scons-local-1.2.0/SCons/Job.py b/tools/scons/scons-local-1.2.0/SCons/Job.py
new file mode 100644
index 000000000..bcd39819a
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Job.py
@@ -0,0 +1,429 @@
+"""SCons.Job
+
+This module defines the Serial and Parallel classes that execute tasks to
+complete a build. The Jobs class provides a higher level interface to start,
+stop, and wait on jobs.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Job.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import signal
+
+import SCons.Errors
+
+# The default stack size (in kilobytes) of the threads used to execute
+# jobs in parallel.
+#
+# We use a stack size of 256 kilobytes. The default on some platforms
+# is too large and prevents us from creating enough threads to fully
+# parallelize the build. For example, the default stack size on Linux
+# is 8 MBytes.
+
+explicit_stack_size = None
+default_stack_size = 256
+
+interrupt_msg = 'Build interrupted.'
+
+
+class InterruptState:
+ def __init__(self):
+ self.interrupted = False
+
+ def set(self):
+ self.interrupted = True
+
+ def __call__(self):
+ return self.interrupted
+
+
+class Jobs:
+ """An instance of this class initializes N jobs, and provides
+ methods for starting, stopping, and waiting on all N jobs.
+ """
+
+ def __init__(self, num, taskmaster):
+ """
+ create 'num' jobs using the given taskmaster.
+
+ If 'num' is 1 or less, then a serial job will be used,
+ otherwise a parallel job with 'num' worker threads will
+ be used.
+
+ The 'num_jobs' attribute will be set to the actual number of jobs
+ allocated. If more than one job is requested but the Parallel
+ class can't do it, it gets reset to 1. Wrapping interfaces that
+ care should check the value of 'num_jobs' after initialization.
+ """
+
+ self.job = None
+ if num > 1:
+ stack_size = explicit_stack_size
+ if stack_size is None:
+ stack_size = default_stack_size
+
+ try:
+ self.job = Parallel(taskmaster, num, stack_size)
+ self.num_jobs = num
+ except NameError:
+ pass
+ if self.job is None:
+ self.job = Serial(taskmaster)
+ self.num_jobs = 1
+
+ def run(self, postfunc=lambda: None):
+ """Run the jobs.
+
+ postfunc() will be invoked after the jobs have run. It will be
+ invoked even if the jobs are interrupted by a keyboard
+ interrupt (well, in fact by a signal such as either SIGINT,
+ SIGTERM or SIGHUP). The execution of postfunc() is protected
+ against keyboard interrupts and is guaranteed to run to
+ completion."""
+ self._setup_sig_handler()
+ try:
+ self.job.start()
+ finally:
+ postfunc()
+ self._reset_sig_handler()
+
+ def were_interrupted(self):
+ """Returns whether the jobs were interrupted by a signal."""
+ return self.job.interrupted()
+
+ def _setup_sig_handler(self):
+ """Setup an interrupt handler so that SCons can shutdown cleanly in
+ various conditions:
+
+ a) SIGINT: Keyboard interrupt
+ b) SIGTERM: kill or system shutdown
+ c) SIGHUP: Controlling shell exiting
+
+ We handle all of these cases by stopping the taskmaster. It
+ turns out that it is very difficult to stop the build process
+ by asynchronously throwing an exception such as
+ KeyboardInterrupt. For example, the Python Condition
+ variables (threading.Condition) and Queues do not seem to be
+ asynchronous-exception-safe. It would require adding
+ try/finally blocks and except KeyboardInterrupt clauses all
+ over the place.
+
+ Note also that we have to be careful to handle the case when
+ SCons forks before executing another process. In that case, we
+ want the child to exit immediately.
+ """
+ def handler(signum, stack, self=self, parentpid=os.getpid()):
+ if os.getpid() == parentpid:
+ self.job.taskmaster.stop()
+ self.job.interrupted.set()
+ else:
+ os._exit(2)
+
+ self.old_sigint = signal.signal(signal.SIGINT, handler)
+ self.old_sigterm = signal.signal(signal.SIGTERM, handler)
+ try:
+ self.old_sighup = signal.signal(signal.SIGHUP, handler)
+ except AttributeError:
+ pass
+
+ def _reset_sig_handler(self):
+ """Restore the signal handlers to their previous state (before the
+ call to _setup_sig_handler())."""
+
+ signal.signal(signal.SIGINT, self.old_sigint)
+ signal.signal(signal.SIGTERM, self.old_sigterm)
+ try:
+ signal.signal(signal.SIGHUP, self.old_sighup)
+ except AttributeError:
+ pass
+
+class Serial:
+ """This class is used to execute tasks in series, and is more efficient
+ than Parallel, but is only appropriate for non-parallel builds. Only
+ one instance of this class should be in existence at a time.
+
+ This class is not thread safe.
+ """
+
+ def __init__(self, taskmaster):
+ """Create a new serial job given a taskmaster.
+
+ The taskmaster's next_task() method should return the next task
+ that needs to be executed, or None if there are no more tasks. The
+ taskmaster's executed() method will be called for each task when it
+ is successfully executed or failed() will be called if it failed to
+ execute (e.g. execute() raised an exception)."""
+
+ self.taskmaster = taskmaster
+ self.interrupted = InterruptState()
+
+ def start(self):
+ """Start the job. This will begin pulling tasks from the taskmaster
+ and executing them, and return when there are no more tasks. If a task
+ fails to execute (i.e. execute() raises an exception), then the job will
+ stop."""
+
+ while 1:
+ task = self.taskmaster.next_task()
+
+ if task is None:
+ break
+
+ try:
+ task.prepare()
+ if task.needs_execute():
+ task.execute()
+ except:
+ if self.interrupted():
+ try:
+ raise SCons.Errors.BuildError(
+ task.targets[0], errstr=interrupt_msg)
+ except:
+ task.exception_set()
+ else:
+ task.exception_set()
+
+ # Let the failed() callback function arrange for the
+ # build to stop if that's appropriate.
+ task.failed()
+ else:
+ task.executed()
+
+ task.postprocess()
+ self.taskmaster.cleanup()
+
+
+# Trap import failure so that everything in the Job module but the
+# Parallel class (and its dependent classes) will work if the interpreter
+# doesn't support threads.
+try:
+ import Queue
+ import threading
+except ImportError:
+ pass
+else:
+ class Worker(threading.Thread):
+ """A worker thread waits on a task to be posted to its request queue,
+ dequeues the task, executes it, and posts a tuple including the task
+ and a boolean indicating whether the task executed successfully. """
+
+ def __init__(self, requestQueue, resultsQueue, interrupted):
+ threading.Thread.__init__(self)
+ self.setDaemon(1)
+ self.requestQueue = requestQueue
+ self.resultsQueue = resultsQueue
+ self.interrupted = interrupted
+ self.start()
+
+ def run(self):
+ while 1:
+ task = self.requestQueue.get()
+
+ if task is None:
+ # The "None" value is used as a sentinel by
+ # ThreadPool.cleanup(). This indicates that there
+ # are no more tasks, so we should quit.
+ break
+
+ try:
+ if self.interrupted():
+ raise SCons.Errors.BuildError(
+ task.targets[0], errstr=interrupt_msg)
+ task.execute()
+ except:
+ task.exception_set()
+ ok = False
+ else:
+ ok = True
+
+ self.resultsQueue.put((task, ok))
+
+ class ThreadPool:
+ """This class is responsible for spawning and managing worker threads."""
+
+ def __init__(self, num, stack_size, interrupted):
+ """Create the request and reply queues, and 'num' worker threads.
+
+ One must specify the stack size of the worker threads. The
+ stack size is specified in kilobytes.
+ """
+ self.requestQueue = Queue.Queue(0)
+ self.resultsQueue = Queue.Queue(0)
+
+ try:
+ prev_size = threading.stack_size(stack_size*1024)
+ except AttributeError, e:
+ # Only print a warning if the stack size has been
+ # explicitly set.
+ if not explicit_stack_size is None:
+ msg = "Setting stack size is unsupported by this version of Python:\n " + \
+ e.args[0]
+ SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
+ except ValueError, e:
+ msg = "Setting stack size failed:\n " + str(e)
+ SCons.Warnings.warn(SCons.Warnings.StackSizeWarning, msg)
+
+ # Create worker threads
+ self.workers = []
+ for _ in range(num):
+ worker = Worker(self.requestQueue, self.resultsQueue, interrupted)
+ self.workers.append(worker)
+
+ # Once we drop Python 1.5 we can change the following to:
+ #if 'prev_size' in locals():
+ if 'prev_size' in locals().keys():
+ threading.stack_size(prev_size)
+
+ def put(self, task):
+ """Put task into request queue."""
+ self.requestQueue.put(task)
+
+ def get(self):
+ """Remove and return a result tuple from the results queue."""
+ return self.resultsQueue.get()
+
+ def preparation_failed(self, task):
+ self.resultsQueue.put((task, False))
+
+ def cleanup(self):
+ """
+ Shuts down the thread pool, giving each worker thread a
+ chance to shut down gracefully.
+ """
+ # For each worker thread, put a sentinel "None" value
+ # on the requestQueue (indicating that there's no work
+ # to be done) so that each worker thread will get one and
+ # terminate gracefully.
+ for _ in self.workers:
+ self.requestQueue.put(None)
+
+ # Wait for all of the workers to terminate.
+ #
+ # If we don't do this, later Python versions (2.4, 2.5) often
+ # seem to raise exceptions during shutdown. This happens
+ # in requestQueue.get(), as an assertion failure that
+ # requestQueue.not_full is notified while not acquired,
+ # seemingly because the main thread has shut down (or is
+ # in the process of doing so) while the workers are still
+ # trying to pull sentinels off the requestQueue.
+ #
+ # Normally these terminations should happen fairly quickly,
+ # but we'll stick a one-second timeout on here just in case
+ # someone gets hung.
+ for worker in self.workers:
+ worker.join(1.0)
+ self.workers = []
+
+ class Parallel:
+ """This class is used to execute tasks in parallel, and is somewhat
+ less efficient than Serial, but is appropriate for parallel builds.
+
+ This class is thread safe.
+ """
+
+ def __init__(self, taskmaster, num, stack_size):
+ """Create a new parallel job given a taskmaster.
+
+ The taskmaster's next_task() method should return the next
+ task that needs to be executed, or None if there are no more
+ tasks. The taskmaster's executed() method will be called
+ for each task when it is successfully executed or failed()
+ will be called if the task failed to execute (i.e. execute()
+ raised an exception).
+
+ Note: calls to taskmaster are serialized, but calls to
+ execute() on distinct tasks are not serialized, because
+ that is the whole point of parallel jobs: they can execute
+ multiple tasks simultaneously. """
+
+ self.taskmaster = taskmaster
+ self.interrupted = InterruptState()
+ self.tp = ThreadPool(num, stack_size, self.interrupted)
+
+ self.maxjobs = num
+
+ def start(self):
+ """Start the job. This will begin pulling tasks from the
+ taskmaster and executing them, and return when there are no
+ more tasks. If a task fails to execute (i.e. execute() raises
+ an exception), then the job will stop."""
+
+ jobs = 0
+
+ while 1:
+ # Start up as many available tasks as we're
+ # allowed to.
+ while jobs < self.maxjobs:
+ task = self.taskmaster.next_task()
+ if task is None:
+ break
+
+ try:
+ # prepare task for execution
+ task.prepare()
+ except:
+ task.exception_set()
+ task.failed()
+ task.postprocess()
+ else:
+ if task.needs_execute():
+ # dispatch task
+ self.tp.put(task)
+ jobs = jobs + 1
+ else:
+ task.executed()
+ task.postprocess()
+
+ if not task and not jobs: break
+
+ # Let any/all completed tasks finish up before we go
+ # back and put the next batch of tasks on the queue.
+ while 1:
+ task, ok = self.tp.get()
+ jobs = jobs - 1
+
+ if ok:
+ task.executed()
+ else:
+ if self.interrupted():
+ try:
+ raise SCons.Errors.BuildError(
+ task.targets[0], errstr=interrupt_msg)
+ except:
+ task.exception_set()
+
+ # Let the failed() callback function arrange
+ # for the build to stop if that's appropriate.
+ task.failed()
+
+ task.postprocess()
+
+ if self.tp.resultsQueue.empty():
+ break
+
+ self.tp.cleanup()
+ self.taskmaster.cleanup()
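The Parallel/ThreadPool/Worker machinery above reduces to a request queue feeding daemon worker threads, a results queue coming back, and one None sentinel per worker for shutdown. The following stripped-down sketch shows that shape using the Python 3 module names (queue, threading); it is an illustration, not the SCons classes.

import queue
import threading

def worker(requests, results):
    # Each worker pulls tasks until it sees the None sentinel.
    while True:
        task = requests.get()
        if task is None:
            break
        try:
            value = task()
            results.put((task, True, value))
        except Exception as exc:
            results.put((task, False, exc))

def run_tasks(tasks, num_workers=4):
    requests, results = queue.Queue(), queue.Queue()
    threads = [threading.Thread(target=worker, args=(requests, results), daemon=True)
               for _ in range(num_workers)]
    for t in threads:
        t.start()
    for task in tasks:
        requests.put(task)
    outcomes = [results.get() for _ in tasks]   # one result per task
    for _ in threads:                           # one sentinel per worker
        requests.put(None)
    for t in threads:
        t.join(timeout=1.0)
    return outcomes

if __name__ == "__main__":
    jobs = [lambda i=i: i * i for i in range(8)]
    for task, ok, value in run_tasks(jobs):
        print(ok, value)

Unlike this sketch, Parallel.start() throttles dispatch to at most maxjobs outstanding tasks and drains the results queue incrementally, so failed or interrupted tasks can stop the build before everything has been submitted.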
diff --git a/tools/scons/scons-local-1.2.0/SCons/Memoize.py b/tools/scons/scons-local-1.2.0/SCons/Memoize.py
new file mode 100644
index 000000000..f79dd6b93
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Memoize.py
@@ -0,0 +1,286 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Memoize.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """Memoizer
+
+A metaclass implementation to count hits and misses of the computed
+values that various methods cache in memory.
+
+Use of this module assumes that wrapped methods are coded to cache their
+values in a consistent way. Here is an example of wrapping a method
+that returns a computed value, with no input parameters:
+
+ memoizer_counters = [] # Memoization
+
+ memoizer_counters.append(SCons.Memoize.CountValue('foo')) # Memoization
+
+ def foo(self):
+
+ try: # Memoization
+ return self._memo['foo'] # Memoization
+ except KeyError: # Memoization
+ pass # Memoization
+
+ result = self.compute_foo_value()
+
+ self._memo['foo'] = result # Memoization
+
+ return result
+
+Here is an example of wrapping a method that will return different values
+based on one or more input arguments:
+
+ def _bar_key(self, argument): # Memoization
+ return argument # Memoization
+
+ memoizer_counters.append(SCons.Memoize.CountDict('bar', _bar_key)) # Memoization
+
+ def bar(self, argument):
+
+ memo_key = argument # Memoization
+ try: # Memoization
+ memo_dict = self._memo['bar'] # Memoization
+ except KeyError: # Memoization
+ memo_dict = {} # Memoization
+ self._memo['bar'] = memo_dict # Memoization
+ else: # Memoization
+ try: # Memoization
+ return memo_dict[memo_key] # Memoization
+ except KeyError: # Memoization
+ pass # Memoization
+
+ result = self.compute_bar_value(argument)
+
+ memo_dict[memo_key] = result # Memoization
+
+ return result
+
+At one point we avoided replicating this sort of logic in all the methods
+by putting it right into this module, but we've moved away from that at
+present (see the "Historical Note" below).
+
+Deciding what to cache is tricky, because different configurations
+can have radically different performance tradeoffs, and because the
+tradeoffs involved are often so non-obvious. Consequently, deciding
+whether or not to cache a given method will likely be more of an art than
+a science, but should still be based on available data from this module.
+Here are some VERY GENERAL guidelines about deciding whether or not to
+cache return values from a method that's being called a lot:
+
+ -- The first question to ask is, "Can we change the calling code
+ so this method isn't called so often?" Sometimes this can be
+ done by changing the algorithm. Sometimes the *caller* should
+ be memoized, not the method you're looking at.
+
+ -- The memoized function should be timed with multiple configurations
+ to make sure it doesn't inadvertently slow down some other
+ configuration.
+
+ -- When memoizing values based on a dictionary key composed of
+ input arguments, you don't need to use all of the arguments
+ if some of them don't affect the return values.
+
+Historical Note: The initial Memoizer implementation actually handled
+the caching of values for the wrapped methods, based on a set of generic
+algorithms for computing hashable values based on the method's arguments.
+This collected caching logic nicely, but had two drawbacks:
+
+ Running arguments through a generic key-conversion mechanism is slower
+ (and less flexible) than just coding these things directly. Since the
+ methods that need memoized values are generally performance-critical,
+ slowing them down in order to collect the logic isn't the right
+ tradeoff.
+
+ Use of the memoizer really obscured what was being called, because
+ all the memoized methods were wrapped with re-used generic methods.
+ This made it more difficult, for example, to use the Python profiler
+ to figure out how to optimize the underlying methods.
+"""
+
+import new
+
+# A flag controlling whether or not we actually use memoization.
+use_memoizer = None
+
+CounterList = []
+
+class Counter:
+ """
+ Base class for counting memoization hits and misses.
+
+ We expect that the metaclass initialization will have filled in
+ the .name attribute that represents the name of the function
+ being counted.
+ """
+ def __init__(self, method_name):
+ """
+ """
+ self.method_name = method_name
+ self.hit = 0
+ self.miss = 0
+ CounterList.append(self)
+ def display(self):
+ fmt = " %7d hits %7d misses %s()"
+ print fmt % (self.hit, self.miss, self.name)
+ def __cmp__(self, other):
+ try:
+ return cmp(self.name, other.name)
+ except AttributeError:
+ return 0
+
+class CountValue(Counter):
+ """
+ A counter class for simple, atomic memoized values.
+
+ A CountValue object should be instantiated in a class for each of
+ the class's methods that memoizes its return value by simply storing
+ the return value in its _memo dictionary.
+
+ We expect that the metaclass initialization will fill in the
+ .underlying_method attribute with the method that we're wrapping.
+ We then call the underlying_method method after counting whether
+ its memoized value has already been set (a hit) or not (a miss).
+ """
+ def __call__(self, *args, **kw):
+ obj = args[0]
+ if obj._memo.has_key(self.method_name):
+ self.hit = self.hit + 1
+ else:
+ self.miss = self.miss + 1
+ return apply(self.underlying_method, args, kw)
+
+class CountDict(Counter):
+ """
+ A counter class for memoized values stored in a dictionary, with
+ keys based on the method's input arguments.
+
+ A CountDict object is instantiated in a class for each of the
+ class's methods that memoizes its return value in a dictionary,
+ indexed by some key that can be computed from one or more of
+ its input arguments.
+
+ We expect that the metaclass initialization will fill in the
+ .underlying_method attribute with the method that we're wrapping.
+ We then call the underlying_method method after counting whether the
+ computed key value is already present in the memoization dictionary
+ (a hit) or not (a miss).
+ """
+ def __init__(self, method_name, keymaker):
+ """
+ """
+ Counter.__init__(self, method_name)
+ self.keymaker = keymaker
+ def __call__(self, *args, **kw):
+ obj = args[0]
+ try:
+ memo_dict = obj._memo[self.method_name]
+ except KeyError:
+ self.miss = self.miss + 1
+ else:
+ key = apply(self.keymaker, args, kw)
+ if memo_dict.has_key(key):
+ self.hit = self.hit + 1
+ else:
+ self.miss = self.miss + 1
+ return apply(self.underlying_method, args, kw)
+
+class Memoizer:
+ """Object which performs caching of method calls for its 'primary'
+ instance."""
+
+ def __init__(self):
+ pass
+
+# Find out if we support metaclasses (Python 2.2 and later).
+
+class M:
+ def __init__(cls, name, bases, cls_dict):
+ cls.use_metaclass = 1
+ def fake_method(self):
+ pass
+ new.instancemethod(fake_method, None, cls)
+
+try:
+ class A:
+ __metaclass__ = M
+
+ use_metaclass = A.use_metaclass
+except AttributeError:
+ use_metaclass = None
+ reason = 'no metaclasses'
+except TypeError:
+ use_metaclass = None
+ reason = 'new.instancemethod() bug'
+else:
+ del A
+
+del M
+
+if not use_metaclass:
+
+ def Dump(title):
+ pass
+
+ try:
+ class Memoized_Metaclass(type):
+ # Just a place-holder so pre-metaclass Python versions don't
+ # have to have special code for the Memoized classes.
+ pass
+ except TypeError:
+ class Memoized_Metaclass:
+ # A place-holder so pre-metaclass Python versions don't
+ # have to have special code for the Memoized classes.
+ pass
+
+ def EnableMemoization():
+ import SCons.Warnings
+ msg = 'memoization is not supported in this version of Python (%s)'
+ raise SCons.Warnings.NoMetaclassSupportWarning, msg % reason
+
+else:
+
+ def Dump(title=None):
+ if title:
+ print title
+ CounterList.sort()
+ for counter in CounterList:
+ counter.display()
+
+ class Memoized_Metaclass(type):
+ def __init__(cls, name, bases, cls_dict):
+ super(Memoized_Metaclass, cls).__init__(name, bases, cls_dict)
+
+ for counter in cls_dict.get('memoizer_counters', []):
+ method_name = counter.method_name
+
+ counter.name = cls.__name__ + '.' + method_name
+ counter.underlying_method = cls_dict[method_name]
+
+ replacement_method = new.instancemethod(counter, None, cls)
+ setattr(cls, method_name, replacement_method)
+
+ def EnableMemoization():
+ global use_memoizer
+ use_memoizer = 1
diff --git a/tools/scons/scons-local-1.2.0/SCons/Node/Alias.py b/tools/scons/scons-local-1.2.0/SCons/Node/Alias.py
new file mode 100644
index 000000000..4ce9fff7d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Node/Alias.py
@@ -0,0 +1,147 @@
+
+"""scons.Node.Alias
+
+Alias nodes.
+
+This creates a hash of global Aliases (dummy targets).
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Node/Alias.py 3842 2008/12/20 22:59:52 scons"
+
+import string
+import UserDict
+
+import SCons.Errors
+import SCons.Node
+import SCons.Util
+
+class AliasNameSpace(UserDict.UserDict):
+ def Alias(self, name, **kw):
+ if isinstance(name, SCons.Node.Alias.Alias):
+ return name
+ try:
+ a = self[name]
+ except KeyError:
+ a = apply(SCons.Node.Alias.Alias, (name,), kw)
+ self[name] = a
+ return a
+
+ def lookup(self, name, **kw):
+ try:
+ return self[name]
+ except KeyError:
+ return None
+
+class AliasNodeInfo(SCons.Node.NodeInfoBase):
+ current_version_id = 1
+ field_list = ['csig']
+ def str_to_node(self, s):
+ return default_ans.Alias(s)
+
+class AliasBuildInfo(SCons.Node.BuildInfoBase):
+ current_version_id = 1
+
+class Alias(SCons.Node.Node):
+
+ NodeInfo = AliasNodeInfo
+ BuildInfo = AliasBuildInfo
+
+ def __init__(self, name):
+ SCons.Node.Node.__init__(self)
+ self.name = name
+
+ def str_for_display(self):
+ return '"' + self.__str__() + '"'
+
+ def __str__(self):
+ return self.name
+
+ def make_ready(self):
+ self.get_csig()
+
+ really_build = SCons.Node.Node.build
+ is_up_to_date = SCons.Node.Node.children_are_up_to_date
+
+ def is_under(self, dir):
+ # Make Alias nodes get built regardless of
+ # what directory scons was run from. Alias nodes
+ # are outside the filesystem:
+ return 1
+
+ def get_contents(self):
+ """The contents of an alias is the concatenation
+ of the content signatures of all its sources."""
+ childsigs = map(lambda n: n.get_csig(), self.children())
+ return string.join(childsigs, '')
+
+ def sconsign(self):
+ """An Alias is not recorded in .sconsign files"""
+ pass
+
+ #
+ #
+ #
+
+ def changed_since_last_build(self, target, prev_ni):
+ cur_csig = self.get_csig()
+ try:
+ return cur_csig != prev_ni.csig
+ except AttributeError:
+ return 1
+
+ def build(self):
+ """A "builder" for aliases."""
+ pass
+
+ def convert(self):
+ try: del self.builder
+ except AttributeError: pass
+ self.reset_executor()
+ self.build = self.really_build
+
+ def get_csig(self):
+ """
+ Generate a node's content signature, the digested signature
+ of its content.
+
+ node - the node
+ cache - alternate node to use for the signature cache
+ returns - the content signature
+ """
+ try:
+ return self.ninfo.csig
+ except AttributeError:
+ pass
+
+ contents = self.get_contents()
+ csig = SCons.Util.MD5signature(contents)
+ self.get_ninfo().csig = csig
+ return csig
+
+default_ans = AliasNameSpace()
+
+SCons.Node.arg2nodes_lookups.append(default_ans.lookup)
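Alias.get_csig() above builds an alias's signature by concatenating its children's content signatures and digesting the result, so the alias is treated as changed whenever any child changes. A minimal sketch of that scheme, using hashlib directly on stand-in byte strings rather than SCons Node objects:

import hashlib

def md5signature(data):
    # Hex digest of a byte string (SCons.Util.MD5signature plays this role in the code above).
    return hashlib.md5(data).hexdigest()

def alias_csig(child_contents):
    # Concatenate each child's content signature, then digest the concatenation.
    child_sigs = [md5signature(c) for c in child_contents]
    return md5signature("".join(child_sigs).encode("ascii"))

if __name__ == "__main__":
    print(alias_csig([b"int main() { return 0; }\n", b"CC = gcc\n"]))
    print(alias_csig([b"int main() { return 1; }\n", b"CC = gcc\n"]))   # differs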
diff --git a/tools/scons/scons-local-1.2.0/SCons/Node/FS.py b/tools/scons/scons-local-1.2.0/SCons/Node/FS.py
new file mode 100644
index 000000000..15368f029
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Node/FS.py
@@ -0,0 +1,3075 @@
+"""scons.Node.FS
+
+File system nodes.
+
+These Nodes represent the canonical external objects that people think
+of when they think of building software: files and directories.
+
+This holds a "default_fs" variable that should be initialized with an FS
+that can be used by scripts or modules looking for the canonical default.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Node/FS.py 3842 2008/12/20 22:59:52 scons"
+
+import fnmatch
+from itertools import izip
+import os
+import os.path
+import re
+import shutil
+import stat
+import string
+import sys
+import time
+import cStringIO
+
+import SCons.Action
+from SCons.Debug import logInstanceCreation
+import SCons.Errors
+import SCons.Memoize
+import SCons.Node
+import SCons.Node.Alias
+import SCons.Subst
+import SCons.Util
+import SCons.Warnings
+
+from SCons.Debug import Trace
+
+do_store_info = True
+
+
+class EntryProxyAttributeError(AttributeError):
+ """
+ An AttributeError subclass for recording and displaying the name
+ of the underlying Entry involved in an AttributeError exception.
+ """
+ def __init__(self, entry_proxy, attribute):
+ AttributeError.__init__(self)
+ self.entry_proxy = entry_proxy
+ self.attribute = attribute
+ def __str__(self):
+ entry = self.entry_proxy.get()
+ fmt = "%s instance %s has no attribute %s"
+ return fmt % (entry.__class__.__name__,
+ repr(entry.name),
+ repr(self.attribute))
+
+# The max_drift value: by default, use a cached signature value for
+# any file that's been untouched for more than two days.
+default_max_drift = 2*24*60*60
+
+#
+# We stringify these file system Nodes a lot. Turning a file system Node
+# into a string is non-trivial, because the final string representation
+# can depend on a lot of factors: whether it's a derived target or not,
+# whether it's linked to a repository or source directory, and whether
+# there's duplication going on. The normal technique for optimizing
+# calculations like this is to memoize (cache) the string value, so you
+# only have to do the calculation once.
+#
+# A number of the above factors, however, can be set after we've already
+# been asked to return a string for a Node, because a Repository() or
+# VariantDir() call or the like may not occur until later in SConscript
+# files. So this variable controls whether we bother trying to save
+# string values for Nodes. The wrapper interface can set this whenever
+# they're done mucking with Repository and VariantDir and the other stuff,
+# to let this module know it can start returning saved string values
+# for Nodes.
+#
+Save_Strings = None
+
+def save_strings(val):
+ global Save_Strings
+ Save_Strings = val
+
+#
+# Avoid unnecessary function calls by recording a Boolean value that
+# tells us whether or not os.path.splitdrive() actually does anything
+# on this system, and therefore whether we need to bother calling it
+# when looking up path names in various methods below.
+#
+
+do_splitdrive = None
+
+def initialize_do_splitdrive():
+ global do_splitdrive
+ drive, path = os.path.splitdrive('X:/foo')
+ do_splitdrive = not not drive
+
+initialize_do_splitdrive()
+
+#
+
+needs_normpath_check = None
+
+def initialize_normpath_check():
+ """
+ Initialize the normpath_check regular expression.
+
+ This function is used by the unit tests to re-initialize the pattern
+ when testing for behavior with different values of os.sep.
+ """
+ global needs_normpath_check
+ if os.sep == '/':
+ pattern = r'.*/|\.$|\.\.$'
+ else:
+ pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep)
+ needs_normpath_check = re.compile(pattern)
+
+initialize_normpath_check()
+
+#
+# SCons.Action objects for interacting with the outside world.
+#
+# The Node.FS methods in this module should use these actions to
+# create and/or remove files and directories; they should *not* use
+# os.{link,symlink,unlink,mkdir}(), etc., directly.
+#
+# Using these SCons.Action objects ensures that descriptions of these
+# external activities are properly displayed, that the displays are
+# suppressed when the -s (silent) option is used, and (most importantly)
+# the actions are disabled when the -n option is used, in which case
+# there should be *no* changes to the external file system(s)...
+#
+
+if hasattr(os, 'link'):
+ def _hardlink_func(fs, src, dst):
+ # If the source is a symlink, we can't just hard-link to it
+ # because a relative symlink may point somewhere completely
+ # different. We must disambiguate the symlink and then
+ # hard-link the final destination file.
+ while fs.islink(src):
+ link = fs.readlink(src)
+ if not os.path.isabs(link):
+ src = link
+ else:
+ src = os.path.join(os.path.dirname(src), link)
+ fs.link(src, dst)
+else:
+ _hardlink_func = None
+
+if hasattr(os, 'symlink'):
+ def _softlink_func(fs, src, dst):
+ fs.symlink(src, dst)
+else:
+ _softlink_func = None
+
+def _copy_func(fs, src, dest):
+ shutil.copy2(src, dest)
+ st = fs.stat(src)
+ fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+
+
+Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy',
+ 'hard-copy', 'soft-copy', 'copy']
+
+Link_Funcs = [] # contains the callables of the specified duplication style
+
+def set_duplicate(duplicate):
+ # Fill in the Link_Funcs list according to the argument
+ # (discarding those not available on the platform).
+
+ # Set up the dictionary that maps the argument names to the
+ # underlying implementations. We do this inside this function,
+ # not in the top-level module code, so that we can remap os.link
+ # and os.symlink for testing purposes.
+ link_dict = {
+ 'hard' : _hardlink_func,
+ 'soft' : _softlink_func,
+ 'copy' : _copy_func
+ }
+
+ if not duplicate in Valid_Duplicates:
+ raise SCons.Errors.InternalError, ("The argument of set_duplicate "
+ "should be in Valid_Duplicates")
+ global Link_Funcs
+ Link_Funcs = []
+ for func in string.split(duplicate,'-'):
+ if link_dict[func]:
+ Link_Funcs.append(link_dict[func])
+
+def LinkFunc(target, source, env):
+ # Relative paths cause problems with symbolic links, so
+ # we use absolute paths, which may be a problem for people
+ # who want to move their soft-linked src-trees around. Those
+ # people should use the 'hard-copy' mode, softlinks cannot be
+ # used for that; at least I have no idea how ...
+ src = source[0].abspath
+ dest = target[0].abspath
+ dir, file = os.path.split(dest)
+ if dir and not target[0].fs.isdir(dir):
+ os.makedirs(dir)
+ if not Link_Funcs:
+ # Set a default order of link functions.
+ set_duplicate('hard-soft-copy')
+ fs = source[0].fs
+ # Now link the files with the previously specified order.
+ for func in Link_Funcs:
+ try:
+ func(fs, src, dest)
+ break
+ except (IOError, OSError):
+ # An OSError indicates something happened like a permissions
+ # problem or an attempt to symlink across file-system
+ # boundaries. An IOError indicates something like the file
+ # not existing. In either case, keep trying additional
+ # functions in the list and only raise an error if the last
+ # one fails.
+ if func == Link_Funcs[-1]:
+ # exceptions from the last link method (copy) are fatal
+ raise
+ return 0
+
+Link = SCons.Action.Action(LinkFunc, None)
+def LocalString(target, source, env):
+ return 'Local copy of %s from %s' % (target[0], source[0])
+
+LocalCopy = SCons.Action.Action(LinkFunc, LocalString)
+
+def UnlinkFunc(target, source, env):
+ t = target[0]
+ t.fs.unlink(t.abspath)
+ return 0
+
+Unlink = SCons.Action.Action(UnlinkFunc, None)
+
+def MkdirFunc(target, source, env):
+ t = target[0]
+ if not t.exists():
+ t.fs.mkdir(t.abspath)
+ return 0
+
+Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)
+
+MkdirBuilder = None
+
+def get_MkdirBuilder():
+ global MkdirBuilder
+ if MkdirBuilder is None:
+ import SCons.Builder
+ import SCons.Defaults
+ # "env" will get filled in by Executor.get_build_env()
+ # calling SCons.Defaults.DefaultEnvironment() when necessary.
+ MkdirBuilder = SCons.Builder.Builder(action = Mkdir,
+ env = None,
+ explain = None,
+ is_explicit = None,
+ target_scanner = SCons.Defaults.DirEntryScanner,
+ name = "MkdirBuilder")
+ return MkdirBuilder
+
+class _Null:
+ pass
+
+_null = _Null()
+
+DefaultSCCSBuilder = None
+DefaultRCSBuilder = None
+
+def get_DefaultSCCSBuilder():
+ global DefaultSCCSBuilder
+ if DefaultSCCSBuilder is None:
+ import SCons.Builder
+ # "env" will get filled in by Executor.get_build_env()
+ # calling SCons.Defaults.DefaultEnvironment() when necessary.
+ act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
+ DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
+ env = None,
+ name = "DefaultSCCSBuilder")
+ return DefaultSCCSBuilder
+
+def get_DefaultRCSBuilder():
+ global DefaultRCSBuilder
+ if DefaultRCSBuilder is None:
+ import SCons.Builder
+ # "env" will get filled in by Executor.get_build_env()
+ # calling SCons.Defaults.DefaultEnvironment() when necessary.
+ act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
+ DefaultRCSBuilder = SCons.Builder.Builder(action = act,
+ env = None,
+ name = "DefaultRCSBuilder")
+ return DefaultRCSBuilder
+
+# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
+_is_cygwin = sys.platform == "cygwin"
+if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
+ def _my_normcase(x):
+ return x
+else:
+ def _my_normcase(x):
+ return string.upper(x)
+
+
+
+class DiskChecker:
+ def __init__(self, type, do, ignore):
+ self.type = type
+ self.do = do
+ self.ignore = ignore
+ self.set_do()
+ def set_do(self):
+ self.__call__ = self.do
+ def set_ignore(self):
+ self.__call__ = self.ignore
+ def set(self, list):
+ if self.type in list:
+ self.set_do()
+ else:
+ self.set_ignore()
+
+def do_diskcheck_match(node, predicate, errorfmt):
+ result = predicate()
+ try:
+ # If calling the predicate() cached a None value from stat(),
+ # remove it so it doesn't interfere with later attempts to
+ # build this Node as we walk the DAG. (This isn't a great way
+ # to do this, we're reaching into an interface that doesn't
+ # really belong to us, but it's all about performance, so
+ # for now we'll just document the dependency...)
+ if node._memo['stat'] is None:
+ del node._memo['stat']
+ except (AttributeError, KeyError):
+ pass
+ if result:
+ raise TypeError, errorfmt % node.abspath
+
+def ignore_diskcheck_match(node, predicate, errorfmt):
+ pass
+
+def do_diskcheck_rcs(node, name):
+ try:
+ rcs_dir = node.rcs_dir
+ except AttributeError:
+ if node.entry_exists_on_disk('RCS'):
+ rcs_dir = node.Dir('RCS')
+ else:
+ rcs_dir = None
+ node.rcs_dir = rcs_dir
+ if rcs_dir:
+ return rcs_dir.entry_exists_on_disk(name+',v')
+ return None
+
+def ignore_diskcheck_rcs(node, name):
+ return None
+
+def do_diskcheck_sccs(node, name):
+ try:
+ sccs_dir = node.sccs_dir
+ except AttributeError:
+ if node.entry_exists_on_disk('SCCS'):
+ sccs_dir = node.Dir('SCCS')
+ else:
+ sccs_dir = None
+ node.sccs_dir = sccs_dir
+ if sccs_dir:
+ return sccs_dir.entry_exists_on_disk('s.'+name)
+ return None
+
+def ignore_diskcheck_sccs(node, name):
+ return None
+
+diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
+diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
+diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)
+
+diskcheckers = [
+ diskcheck_match,
+ diskcheck_rcs,
+ diskcheck_sccs,
+]
+
+def set_diskcheck(list):
+ for dc in diskcheckers:
+ dc.set(list)
+
+def diskcheck_types():
+ return map(lambda dc: dc.type, diskcheckers)
+
+
+
+class EntryProxy(SCons.Util.Proxy):
+ def __get_abspath(self):
+ entry = self.get()
+ return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
+ entry.name + "_abspath")
+
+ def __get_filebase(self):
+ name = self.get().name
+ return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
+ name + "_filebase")
+
+ def __get_suffix(self):
+ name = self.get().name
+ return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
+ name + "_suffix")
+
+ def __get_file(self):
+ name = self.get().name
+ return SCons.Subst.SpecialAttrWrapper(name, name + "_file")
+
+ def __get_base_path(self):
+ """Return the file's directory and file name, with the
+ suffix stripped."""
+ entry = self.get()
+ return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
+ entry.name + "_base")
+
+ def __get_posix_path(self):
+ """Return the path with / as the path separator,
+ regardless of platform."""
+ if os.sep == '/':
+ return self
+ else:
+ entry = self.get()
+ r = string.replace(entry.get_path(), os.sep, '/')
+ return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
+
+ def __get_windows_path(self):
+ """Return the path with \ as the path separator,
+ regardless of platform."""
+ if os.sep == '\\':
+ return self
+ else:
+ entry = self.get()
+ r = string.replace(entry.get_path(), os.sep, '\\')
+ return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")
+
+ def __get_srcnode(self):
+ return EntryProxy(self.get().srcnode())
+
+ def __get_srcdir(self):
+ """Returns the directory containing the source node linked to this
+ node via VariantDir(), or the directory of this node if not linked."""
+ return EntryProxy(self.get().srcnode().dir)
+
+ def __get_rsrcnode(self):
+ return EntryProxy(self.get().srcnode().rfile())
+
+ def __get_rsrcdir(self):
+ """Returns the directory containing the source node linked to this
+ node via VariantDir(), or the directory of this node if not linked."""
+ return EntryProxy(self.get().srcnode().rfile().dir)
+
+ def __get_dir(self):
+ return EntryProxy(self.get().dir)
+
+ dictSpecialAttrs = { "base" : __get_base_path,
+ "posix" : __get_posix_path,
+ "windows" : __get_windows_path,
+ "win32" : __get_windows_path,
+ "srcpath" : __get_srcnode,
+ "srcdir" : __get_srcdir,
+ "dir" : __get_dir,
+ "abspath" : __get_abspath,
+ "filebase" : __get_filebase,
+ "suffix" : __get_suffix,
+ "file" : __get_file,
+ "rsrcpath" : __get_rsrcnode,
+ "rsrcdir" : __get_rsrcdir,
+ }
+
+ def __getattr__(self, name):
+ # This is how we implement the "special" attributes
+ # such as base, posix, srcdir, etc.
+ try:
+ attr_function = self.dictSpecialAttrs[name]
+ except KeyError:
+ try:
+ attr = SCons.Util.Proxy.__getattr__(self, name)
+ except AttributeError, e:
+ # Raise our own AttributeError subclass with an
+ # overridden __str__() method that identifies the
+ # name of the entry that caused the exception.
+ raise EntryProxyAttributeError(self, name)
+ return attr
+ else:
+ return attr_function(self)
+
+class Base(SCons.Node.Node):
+ """A generic class for file system entries. This class is for
+ when we don't know yet whether the entry being looked up is a file
+ or a directory. Instances of this class can morph into either
+ Dir or File objects by a later, more precise lookup.
+
+ Note: this class does not define __cmp__ and __hash__ for
+ efficiency reasons. SCons does a lot of comparing of
+ Node.FS.{Base,Entry,File,Dir} objects, so those operations must be
+ as fast as possible, which means we want to use Python's built-in
+ object identity comparisons.
+ """
+
+ memoizer_counters = []
+
+ def __init__(self, name, directory, fs):
+ """Initialize a generic Node.FS.Base object.
+
+ Call the superclass initialization, take care of setting up
+ our relative and absolute paths, identify our parent
+ directory, and indicate that this node should use
+ signatures."""
+ if __debug__: logInstanceCreation(self, 'Node.FS.Base')
+ SCons.Node.Node.__init__(self)
+
+ self.name = name
+ self.suffix = SCons.Util.splitext(name)[1]
+ self.fs = fs
+
+ assert directory, "A directory must be provided"
+
+ self.abspath = directory.entry_abspath(name)
+ self.labspath = directory.entry_labspath(name)
+ if directory.path == '.':
+ self.path = name
+ else:
+ self.path = directory.entry_path(name)
+ if directory.tpath == '.':
+ self.tpath = name
+ else:
+ self.tpath = directory.entry_tpath(name)
+ self.path_elements = directory.path_elements + [self]
+
+ self.dir = directory
+ self.cwd = None # will hold the SConscript directory for target nodes
+ self.duplicate = directory.duplicate
+
+ def str_for_display(self):
+ return '"' + self.__str__() + '"'
+
+ def must_be_same(self, klass):
+ """
+ This node, which already existed, is being looked up as the
+ specified klass. Raise an exception if it isn't.
+ """
+ if self.__class__ is klass or klass is Entry:
+ return
+ raise TypeError, "Tried to lookup %s '%s' as a %s." %\
+ (self.__class__.__name__, self.path, klass.__name__)
+
+ def get_dir(self):
+ return self.dir
+
+ def get_suffix(self):
+ return self.suffix
+
+ def rfile(self):
+ return self
+
+ def __str__(self):
+ """A Node.FS.Base object's string representation is its path
+ name."""
+ global Save_Strings
+ if Save_Strings:
+ return self._save_str()
+ return self._get_str()
+
+ memoizer_counters.append(SCons.Memoize.CountValue('_save_str'))
+
+ def _save_str(self):
+ try:
+ return self._memo['_save_str']
+ except KeyError:
+ pass
+ result = self._get_str()
+ self._memo['_save_str'] = result
+ return result
+
+ def _get_str(self):
+ global Save_Strings
+ if self.duplicate or self.is_derived():
+ return self.get_path()
+ srcnode = self.srcnode()
+ if srcnode.stat() is None and self.stat() is not None:
+ result = self.get_path()
+ else:
+ result = srcnode.get_path()
+ if not Save_Strings:
+ # We're not at the point where we're saving the string
+ # representations of FS Nodes (because we haven't finished
+ # reading the SConscript files and need to have str() return
+ # things relative to them). That also means we can't yet
+ # cache values returned (or not returned) by stat(), since
+ # Python code in the SConscript files might still create
+ # or otherwise affect the on-disk file. So get rid of the
+ # values that the underlying stat() method saved.
+ try: del self._memo['stat']
+ except KeyError: pass
+ if self is not srcnode:
+ try: del srcnode._memo['stat']
+ except KeyError: pass
+ return result
+
+ rstr = __str__
+
+ memoizer_counters.append(SCons.Memoize.CountValue('stat'))
+
+ def stat(self):
+ try: return self._memo['stat']
+ except KeyError: pass
+ try: result = self.fs.stat(self.abspath)
+ except os.error: result = None
+ self._memo['stat'] = result
+ return result
+
+ def exists(self):
+ return self.stat() is not None
+
+ def rexists(self):
+ return self.rfile().exists()
+
+ def getmtime(self):
+ st = self.stat()
+ if st: return st[stat.ST_MTIME]
+ else: return None
+
+ def getsize(self):
+ st = self.stat()
+ if st: return st[stat.ST_SIZE]
+ else: return None
+
+ def isdir(self):
+ st = self.stat()
+ return st is not None and stat.S_ISDIR(st[stat.ST_MODE])
+
+ def isfile(self):
+ st = self.stat()
+ return st is not None and stat.S_ISREG(st[stat.ST_MODE])
+
+ if hasattr(os, 'symlink'):
+ def islink(self):
+ try: st = self.fs.lstat(self.abspath)
+ except os.error: return 0
+ return stat.S_ISLNK(st[stat.ST_MODE])
+ else:
+ def islink(self):
+ return 0 # no symlinks
+
+ def is_under(self, dir):
+ if self is dir:
+ return 1
+ else:
+ return self.dir.is_under(dir)
+
+ def set_local(self):
+ self._local = 1
+
+ def srcnode(self):
+ """If this node is in a build path, return the node
+ corresponding to its source file. Otherwise, return
+ ourself.
+ """
+ srcdir_list = self.dir.srcdir_list()
+ if srcdir_list:
+ srcnode = srcdir_list[0].Entry(self.name)
+ srcnode.must_be_same(self.__class__)
+ return srcnode
+ return self
+
+ def get_path(self, dir=None):
+ """Return path relative to the current working directory of the
+ Node.FS.Base object that owns us."""
+ if not dir:
+ dir = self.fs.getcwd()
+ if self == dir:
+ return '.'
+ path_elems = self.path_elements
+ try: i = path_elems.index(dir)
+ except ValueError: pass
+ else: path_elems = path_elems[i+1:]
+ path_elems = map(lambda n: n.name, path_elems)
+ return string.join(path_elems, os.sep)
+
+ def set_src_builder(self, builder):
+ """Set the source code builder for this node."""
+ self.sbuilder = builder
+ if not self.has_builder():
+ self.builder_set(builder)
+
+ def src_builder(self):
+ """Fetch the source code builder for this node.
+
+ If there isn't one, we cache the source code builder specified
+ for the directory (which in turn will cache the value from its
+ parent directory, and so on up to the file system root).
+ """
+ try:
+ scb = self.sbuilder
+ except AttributeError:
+ scb = self.dir.src_builder()
+ self.sbuilder = scb
+ return scb
+
+ def get_abspath(self):
+ """Get the absolute path of the file."""
+ return self.abspath
+
+ def for_signature(self):
+ # Return just our name. Even an absolute path would not work,
+ # because that can change thanks to symlinks or remapped network
+ # paths.
+ return self.name
+
+ def get_subst_proxy(self):
+ try:
+ return self._proxy
+ except AttributeError:
+ ret = EntryProxy(self)
+ self._proxy = ret
+ return ret
+
+ def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
+ """
+
+ Generates a target entry that corresponds to this entry (usually
+ a source file) with the specified prefix and suffix.
+
+ Note that this method can be overridden dynamically for generated
+ files that need different behavior. See Tool/swig.py for
+ an example.
+ """
+ return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)
+
+ def _Rfindalldirs_key(self, pathlist):
+ return pathlist
+
+ memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key))
+
+ def Rfindalldirs(self, pathlist):
+ """
+ Return all of the directories for a given path list, including
+ corresponding "backing" directories in any repositories.
+
+ The Node lookups are relative to this Node (typically a
+ directory), so memoizing result saves cycles from looking
+ up the same path for each target in a given directory.
+ """
+ try:
+ memo_dict = self._memo['Rfindalldirs']
+ except KeyError:
+ memo_dict = {}
+ self._memo['Rfindalldirs'] = memo_dict
+ else:
+ try:
+ return memo_dict[pathlist]
+ except KeyError:
+ pass
+
+ create_dir_relative_to_self = self.Dir
+ result = []
+ for path in pathlist:
+ if isinstance(path, SCons.Node.Node):
+ result.append(path)
+ else:
+ dir = create_dir_relative_to_self(path)
+ result.extend(dir.get_all_rdirs())
+
+ memo_dict[pathlist] = result
+
+ return result
+
+ def RDirs(self, pathlist):
+ """Search for a list of directories in the Repository list."""
+ cwd = self.cwd or self.fs._cwd
+ return cwd.Rfindalldirs(pathlist)
+
+ memoizer_counters.append(SCons.Memoize.CountValue('rentry'))
+
+ def rentry(self):
+ try:
+ return self._memo['rentry']
+ except KeyError:
+ pass
+ result = self
+ if not self.exists():
+ norm_name = _my_normcase(self.name)
+ for dir in self.dir.get_all_rdirs():
+ try:
+ node = dir.entries[norm_name]
+ except KeyError:
+ if dir.entry_exists_on_disk(self.name):
+ result = dir.Entry(self.name)
+ break
+ self._memo['rentry'] = result
+ return result
+
+ def _glob1(self, pattern, ondisk=True, source=False, strings=False):
+ return []
+
+class Entry(Base):
+ """This is the class for generic Node.FS entries--that is, things
+ that could be a File or a Dir, but we're just not sure yet.
+ Consequently, the methods in this class really exist just to
+ transform their associated object into the right class when the
+ time comes, and then call the same-named method in the transformed
+ class."""
+
+ def diskcheck_match(self):
+ pass
+
+ def disambiguate(self, must_exist=None):
+ """
+ """
+ if self.isdir():
+ self.__class__ = Dir
+ self._morph()
+ elif self.isfile():
+ self.__class__ = File
+ self._morph()
+ self.clear()
+ else:
+ # There was nothing on-disk at this location, so look in
+ # the src directory.
+ #
+ # We can't just use self.srcnode() straight away because
+ # that would create an actual Node for this file in the src
+ # directory, and there might not be one. Instead, use the
+ # dir_on_disk() method to see if there's something on-disk
+ # with that name, in which case we can go ahead and call
+ # self.srcnode() to create the right type of entry.
+ srcdir = self.dir.srcnode()
+ if srcdir != self.dir and \
+ srcdir.entry_exists_on_disk(self.name) and \
+ self.srcnode().isdir():
+ self.__class__ = Dir
+ self._morph()
+ elif must_exist:
+ msg = "No such file or directory: '%s'" % self.abspath
+ raise SCons.Errors.UserError, msg
+ else:
+ self.__class__ = File
+ self._morph()
+ self.clear()
+ return self
+
+ def rfile(self):
+ """We're a generic Entry, but the caller is actually looking for
+ a File at this point, so morph into one."""
+ self.__class__ = File
+ self._morph()
+ self.clear()
+ return File.rfile(self)
+
+ def scanner_key(self):
+ return self.get_suffix()
+
+ def get_contents(self):
+ """Fetch the contents of the entry.
+
+ Since this should return the real contents from the file
+ system, we check to see into what sort of subclass we should
+ morph this Entry."""
+ try:
+ self = self.disambiguate(must_exist=1)
+ except SCons.Errors.UserError:
+ # There was nothing on disk with which to disambiguate
+ # this entry. Leave it as an Entry, but return a null
+ # string so calls to get_contents() in emitters and the
+ # like (e.g. in qt.py) don't have to disambiguate by hand
+ # or catch the exception.
+ return ''
+ else:
+ return self.get_contents()
+
+ def must_be_same(self, klass):
+ """Called to make sure a Node is a Dir. Since we're an
+ Entry, we can morph into one."""
+ if self.__class__ is not klass:
+ self.__class__ = klass
+ self._morph()
+ self.clear()
+
+ # The following methods can get called before the Taskmaster has
+ # had a chance to call disambiguate() directly to see if this Entry
+ # should really be a Dir or a File. We therefore use these to call
+ # disambiguate() transparently (from our caller's point of view).
+ #
+ # Right now, this minimal set of methods has been derived by just
+ # looking at some of the methods that will obviously be called early
+ # in any of the various Taskmasters' calling sequences, and then
+ # empirically figuring out which additional methods are necessary
+ # to make various tests pass.
+
+ def exists(self):
+ """Return if the Entry exists. Check the file system to see
+ what we should turn into first. Assume a file if there's no
+ directory."""
+ return self.disambiguate().exists()
+
+ def rel_path(self, other):
+ d = self.disambiguate()
+ if d.__class__ is Entry:
+ raise "rel_path() could not disambiguate File/Dir"
+ return d.rel_path(other)
+
+ def new_ninfo(self):
+ return self.disambiguate().new_ninfo()
+
+ def changed_since_last_build(self, target, prev_ni):
+ return self.disambiguate().changed_since_last_build(target, prev_ni)
+
+ def _glob1(self, pattern, ondisk=True, source=False, strings=False):
+ return self.disambiguate()._glob1(pattern, ondisk, source, strings)
+
+# This is for later so we can differentiate between Entry the class and Entry
+# the method of the FS class.
+_classEntry = Entry
+
+
+class LocalFS:
+
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ # This class implements an abstraction layer for operations involving
+ # a local file system. Essentially, this wraps any function in
+ # the os, os.path or shutil modules that we use to actually go do
+ # anything with or to the local file system.
+ #
+ # Note that there's a very good chance we'll refactor this part of
+ # the architecture in some way as we really implement the interface(s)
+ # for remote file system Nodes. For example, the right architecture
+ # might be to have this be a subclass instead of a base class.
+ # Nevertheless, we're using this as a first step in that direction.
+ #
+ # We're not using chdir() yet because the calling subclass method
+ # needs to use os.chdir() directly to avoid recursion. Will we
+ # really need this one?
+ #def chdir(self, path):
+ # return os.chdir(path)
+ def chmod(self, path, mode):
+ return os.chmod(path, mode)
+ def copy(self, src, dst):
+ return shutil.copy(src, dst)
+ def copy2(self, src, dst):
+ return shutil.copy2(src, dst)
+ def exists(self, path):
+ return os.path.exists(path)
+ def getmtime(self, path):
+ return os.path.getmtime(path)
+ def getsize(self, path):
+ return os.path.getsize(path)
+ def isdir(self, path):
+ return os.path.isdir(path)
+ def isfile(self, path):
+ return os.path.isfile(path)
+ def link(self, src, dst):
+ return os.link(src, dst)
+ def lstat(self, path):
+ return os.lstat(path)
+ def listdir(self, path):
+ return os.listdir(path)
+ def makedirs(self, path):
+ return os.makedirs(path)
+ def mkdir(self, path):
+ return os.mkdir(path)
+ def rename(self, old, new):
+ return os.rename(old, new)
+ def stat(self, path):
+ return os.stat(path)
+ def symlink(self, src, dst):
+ return os.symlink(src, dst)
+ def open(self, path):
+ return open(path)
+ def unlink(self, path):
+ return os.unlink(path)
+
+ if hasattr(os, 'symlink'):
+ def islink(self, path):
+ return os.path.islink(path)
+ else:
+ def islink(self, path):
+ return 0 # no symlinks
+
+ if hasattr(os, 'readlink'):
+ def readlink(self, file):
+ return os.readlink(file)
+ else:
+ def readlink(self, file):
+ return ''
+
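+# Illustrative only (not part of SCons): because every os/os.path/shutil
+# call goes through the thin wrappers above, and the FS class below
+# inherits from LocalFS, a test (or an eventual remote filesystem layer)
+# could override individual wrappers without touching the Node logic.
+# The subclass here is hypothetical.
+#
+#class PretendEmptyFS(FS):
+#    def exists(self, path):
+#        return False      # pretend nothing is on disk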
+
+#class RemoteFS:
+# # Skeleton for the obvious methods we might need from the
+# # abstraction layer for a remote filesystem.
+# def upload(self, local_src, remote_dst):
+# pass
+# def download(self, remote_src, local_dst):
+# pass
+
+
+class FS(LocalFS):
+
+ memoizer_counters = []
+
+ def __init__(self, path = None):
+ """Initialize the Node.FS subsystem.
+
+ The supplied path is the top of the source tree, where we
+ expect to find the top-level build file. If no path is
+ supplied, the current directory is the default.
+
+ The path argument must be a valid absolute path.
+ """
+ if __debug__: logInstanceCreation(self, 'Node.FS')
+
+ self._memo = {}
+
+ self.Root = {}
+ self.SConstruct_dir = None
+ self.max_drift = default_max_drift
+
+ self.Top = None
+ if path is None:
+ self.pathTop = os.getcwd()
+ else:
+ self.pathTop = path
+ self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0])
+
+ self.Top = self.Dir(self.pathTop)
+ self.Top.path = '.'
+ self.Top.tpath = '.'
+ self._cwd = self.Top
+
+ DirNodeInfo.fs = self
+ FileNodeInfo.fs = self
+
+ def set_SConstruct_dir(self, dir):
+ self.SConstruct_dir = dir
+
+ def get_max_drift(self):
+ return self.max_drift
+
+ def set_max_drift(self, max_drift):
+ self.max_drift = max_drift
+
+ def getcwd(self):
+ return self._cwd
+
+ def chdir(self, dir, change_os_dir=0):
+ """Change the current working directory for lookups.
+ If change_os_dir is true, we will also change the "real" cwd
+ to match.
+ """
+ curr=self._cwd
+ try:
+ if dir is not None:
+ self._cwd = dir
+ if change_os_dir:
+ os.chdir(dir.abspath)
+ except OSError:
+ self._cwd = curr
+ raise
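+ # Illustrative only (not part of SCons): chdir() redirects where
+ # relative lookups resolve. 'fs' stands for an FS instance and
+ # 'src' for a hypothetical subdirectory.
+ #
+ # src = fs.Dir('src')
+ # fs.chdir(src)                    # fs.Entry('foo') now means src/foo
+ # fs.chdir(src, change_os_dir=1)   # also os.chdir() to src.abspath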
+
+ def get_root(self, drive):
+ """
+ Returns the root directory for the specified drive, creating
+ it if necessary.
+ """
+ drive = _my_normcase(drive)
+ try:
+ return self.Root[drive]
+ except KeyError:
+ root = RootDir(drive, self)
+ self.Root[drive] = root
+ if not drive:
+ self.Root[self.defaultDrive] = root
+ elif drive == self.defaultDrive:
+ self.Root[''] = root
+ return root
+
+ def _lookup(self, p, directory, fsclass, create=1):
+ """
+ The generic entry point for Node lookup with user-supplied data.
+
+ This translates arbitrary input into a canonical Node.FS object
+ of the specified fsclass. The general approach for strings is
+ to turn it into a fully normalized absolute path and then call
+ the root directory's lookup_abs() method for the heavy lifting.
+
+ If the path name begins with '#', it is unconditionally
+ interpreted relative to the top-level directory of this FS. '#'
+ is treated as a synonym for the top-level SConstruct directory,
+ much like '~' is treated as a synonym for the user's home
+ directory in a UNIX shell. So both '#foo' and '#/foo' refer
+ to the 'foo' subdirectory underneath the top-level SConstruct
+ directory.
+
+ If the path name is relative, then the path is looked up relative
+ to the specified directory, or the current directory (self._cwd,
+ typically the SConscript directory) if the specified directory
+ is None.
+ """
+ if isinstance(p, Base):
+ # It's already a Node.FS object. Make sure it's the right
+ # class and return.
+ p.must_be_same(fsclass)
+ return p
+ # str(p) in case it's something like a proxy object
+ p = str(p)
+
+ initial_hash = (p[0:1] == '#')
+ if initial_hash:
+ # There was an initial '#', so we strip it and override
+ # whatever directory they may have specified with the
+ # top-level SConstruct directory.
+ p = p[1:]
+ directory = self.Top
+
+ if directory and not isinstance(directory, Dir):
+ directory = self.Dir(directory)
+
+ if do_splitdrive:
+ drive, p = os.path.splitdrive(p)
+ else:
+ drive = ''
+ if drive and not p:
+ # This causes a naked drive letter to be treated as a synonym
+ # for the root directory on that drive.
+ p = os.sep
+ absolute = os.path.isabs(p)
+
+ needs_normpath = needs_normpath_check.match(p)
+
+ if initial_hash or not absolute:
+ # This is a relative lookup, either to the top-level
+ # SConstruct directory (because of the initial '#') or to
+ # the current directory (the path name is not absolute).
+ # Add the string to the appropriate directory lookup path,
+ # after which the whole thing gets normalized.
+ if not directory:
+ directory = self._cwd
+ if p:
+ p = directory.labspath + '/' + p
+ else:
+ p = directory.labspath
+
+ if needs_normpath:
+ p = os.path.normpath(p)
+
+ if drive or absolute:
+ root = self.get_root(drive)
+ else:
+ if not directory:
+ directory = self._cwd
+ root = directory.root
+
+ if os.sep != '/':
+ p = string.replace(p, os.sep, '/')
+ return root._lookup_abs(p, fsclass, create)
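+ # Illustrative only (not part of SCons): how the rules above play out
+ # for a few hypothetical inputs, assuming the top-level SConstruct
+ # directory is /top and the current SConscript directory is /top/sub.
+ #
+ # fs.Entry('#include/x.h')  -> /top/include/x.h  ('#' means Top)
+ # fs.Entry('include/x.h')   -> /top/sub/include/x.h  (relative to cwd)
+ # fs.Entry('/etc/hosts')    -> looked up from the root directory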
+
+ def Entry(self, name, directory = None, create = 1):
+ """Look up or create a generic Entry node with the specified name.
+ If the name is a relative path (begins with ./, ../, or a file
+ name), then it is looked up relative to the supplied directory
+ node, or to the top level directory of the FS (supplied at
+ construction time) if no directory is supplied.
+ """
+ return self._lookup(name, directory, Entry, create)
+
+ def File(self, name, directory = None, create = 1):
+ """Look up or create a File node with the specified name. If
+ the name is a relative path (begins with ./, ../, or a file name),
+ then it is looked up relative to the supplied directory node,
+ or to the top level directory of the FS (supplied at construction
+ time) if no directory is supplied.
+
+ This method will raise TypeError if a directory is found at the
+ specified path.
+ """
+ return self._lookup(name, directory, File, create)
+
+ def Dir(self, name, directory = None, create = True):
+ """Look up or create a Dir node with the specified name. If
+ the name is a relative path (begins with ./, ../, or a file name),
+ then it is looked up relative to the supplied directory node,
+ or to the top level directory of the FS (supplied at construction
+ time) if no directory is supplied.
+
+ This method will raise TypeError if a normal file is found at the
+ specified path.
+ """
+ return self._lookup(name, directory, Dir, create)
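+ # Illustrative only (not part of SCons): all three typed lookups
+ # above funnel into _lookup(); the names are hypothetical.
+ #
+ # hdr = fs.File('include/x.h')  # TypeError if a directory is there
+ # sub = fs.Dir('src')           # TypeError if a regular file is there
+ # unk = fs.Entry('maybe')       # stays generic until disambiguate()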
+
+ def VariantDir(self, variant_dir, src_dir, duplicate=1):
+ """Link the supplied variant directory to the source directory
+ for purposes of building files."""
+
+ if not isinstance(src_dir, SCons.Node.Node):
+ src_dir = self.Dir(src_dir)
+ if not isinstance(variant_dir, SCons.Node.Node):
+ variant_dir = self.Dir(variant_dir)
+ if src_dir.is_under(variant_dir):
+ raise SCons.Errors.UserError, "Source directory cannot be under variant directory."
+ if variant_dir.srcdir:
+ if variant_dir.srcdir == src_dir:
+ return # We already did this.
+ raise SCons.Errors.UserError, "'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)
+ variant_dir.link(src_dir, duplicate)
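+ # Illustrative only (not part of SCons): linking a hypothetical
+ # 'build' directory to 'src'.
+ #
+ # fs.VariantDir('build', 'src', duplicate=0)
+ # fs.File('build/prog.c')   # with duplicate=0, sources like this are
+ #                           # found back in src/ if they only exist there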
+
+ def Repository(self, *dirs):
+ """Specify Repository directories to search."""
+ for d in dirs:
+ if not isinstance(d, SCons.Node.Node):
+ d = self.Dir(d)
+ self.Top.addRepository(d)
+
+ def variant_dir_target_climb(self, orig, dir, tail):
+ """Create targets in corresponding variant directories
+
+ Climb the directory tree, and look up path names
+ relative to any linked variant directories we find.
+
+ Even though this loops and walks up the tree, we don't memoize
+ the return value because this is really only used to process
+ the command-line targets.
+ """
+ targets = []
+ message = None
+ fmt = "building associated VariantDir targets: %s"
+ start_dir = dir
+ while dir:
+ for bd in dir.variant_dirs:
+ if start_dir.is_under(bd):
+ # If already in the build-dir location, don't reflect
+ return [orig], fmt % str(orig)
+ p = apply(os.path.join, [bd.path] + tail)
+ targets.append(self.Entry(p))
+ tail = [dir.name] + tail
+ dir = dir.up()
+ if targets:
+ message = fmt % string.join(map(str, targets))
+ return targets, message
+
+ def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None):
+ """
+ Globs
+
+ This is mainly a shim layer
+ """
+ if cwd is None:
+ cwd = self.getcwd()
+ return cwd.glob(pathname, ondisk, source, strings)
+
+class DirNodeInfo(SCons.Node.NodeInfoBase):
+ current_version_id = 1
+
+ # This should get reset by the FS initialization.
+ fs = None
+
+ def str_to_node(self, s):
+ top = self.fs.Top
+ root = top.root
+ if do_splitdrive:
+ drive, s = os.path.splitdrive(s)
+ if drive:
+ root = self.fs.get_root(drive)
+ if not os.path.isabs(s):
+ s = top.labspath + '/' + s
+ return root._lookup_abs(s, Entry)
+
+class DirBuildInfo(SCons.Node.BuildInfoBase):
+ current_version_id = 1
+
+glob_magic_check = re.compile('[*?[]')
+
+def has_glob_magic(s):
+ return glob_magic_check.search(s) is not None
+
+class Dir(Base):
+ """A class for directories in a file system.
+ """
+
+ memoizer_counters = []
+
+ NodeInfo = DirNodeInfo
+ BuildInfo = DirBuildInfo
+
+ def __init__(self, name, directory, fs):
+ if __debug__: logInstanceCreation(self, 'Node.FS.Dir')
+ Base.__init__(self, name, directory, fs)
+ self._morph()
+
+ def _morph(self):
+ """Turn a file system Node (either a freshly initialized directory
+ object or a separate Entry object) into a proper directory object.
+
+ Set up this directory's entries and hook it into the file
+ system tree. Specify that directories (this Node) don't use
+ signatures for calculating whether they're current.
+ """
+
+ self.repositories = []
+ self.srcdir = None
+
+ self.entries = {}
+ self.entries['.'] = self
+ self.entries['..'] = self.dir
+ self.cwd = self
+ self.searched = 0
+ self._sconsign = None
+ self.variant_dirs = []
+ self.root = self.dir.root
+
+ # Don't just reset the executor, replace its action list,
+ # because it might have some pre-or post-actions that need to
+ # be preserved.
+ self.builder = get_MkdirBuilder()
+ self.get_executor().set_action_list(self.builder.action)
+
+ def diskcheck_match(self):
+ diskcheck_match(self, self.isfile,
+ "File %s found where directory expected.")
+
+ def __clearRepositoryCache(self, duplicate=None):
+ """Called when we change the repository(ies) for a directory.
+ This clears any cached information that is invalidated by changing
+ the repository."""
+
+ for node in self.entries.values():
+ if node != self.dir:
+ if node != self and isinstance(node, Dir):
+ node.__clearRepositoryCache(duplicate)
+ else:
+ node.clear()
+ try:
+ del node._srcreps
+ except AttributeError:
+ pass
+ if duplicate is not None:
+ node.duplicate=duplicate
+
+ def __resetDuplicate(self, node):
+ if node != self:
+ node.duplicate = node.get_dir().duplicate
+
+ def Entry(self, name):
+ """
+ Looks up or creates an entry node named 'name' relative to
+ this directory.
+ """
+ return self.fs.Entry(name, self)
+
+ def Dir(self, name, create=True):
+ """
+ Looks up or creates a directory node named 'name' relative to
+ this directory.
+ """
+ return self.fs.Dir(name, self, create)
+
+ def File(self, name):
+ """
+ Looks up or creates a file node named 'name' relative to
+ this directory.
+ """
+ return self.fs.File(name, self)
+
+ def _lookup_rel(self, name, klass, create=1):
+ """
+ Looks up a *normalized* relative path name, relative to this
+ directory.
+
+ This method is intended for use by internal lookups with
+ already-normalized path data. For general-purpose lookups,
+ use the Entry(), Dir() and File() methods above.
+
+ This method does *no* input checking and will die or give
+ incorrect results if it's passed a non-normalized path name (e.g.,
+ a path containing '..'), an absolute path name, a top-relative
+ ('#foo') path name, or any kind of object.
+ """
+ name = self.entry_labspath(name)
+ return self.root._lookup_abs(name, klass, create)
+
+ def link(self, srcdir, duplicate):
+ """Set this directory as the variant directory for the
+ supplied source directory."""
+ self.srcdir = srcdir
+ self.duplicate = duplicate
+ self.__clearRepositoryCache(duplicate)
+ srcdir.variant_dirs.append(self)
+
+ def getRepositories(self):
+ """Returns a list of repositories for this directory.
+ """
+ if self.srcdir and not self.duplicate:
+ return self.srcdir.get_all_rdirs() + self.repositories
+ return self.repositories
+
+ memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs'))
+
+ def get_all_rdirs(self):
+ try:
+ return list(self._memo['get_all_rdirs'])
+ except KeyError:
+ pass
+
+ result = [self]
+ fname = '.'
+ dir = self
+ while dir:
+ for rep in dir.getRepositories():
+ result.append(rep.Dir(fname))
+ if fname == '.':
+ fname = dir.name
+ else:
+ fname = dir.name + os.sep + fname
+ dir = dir.up()
+
+ self._memo['get_all_rdirs'] = list(result)
+
+ return result
+
+ def addRepository(self, dir):
+ if dir != self and not dir in self.repositories:
+ self.repositories.append(dir)
+ dir.tpath = '.'
+ self.__clearRepositoryCache()
+
+ def up(self):
+ return self.entries['..']
+
+ def _rel_path_key(self, other):
+ return str(other)
+
+ memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))
+
+ def rel_path(self, other):
+ """Return a path to "other" relative to this directory.
+ """
+
+ # This complicated and expensive method, which constructs relative
+ # paths between arbitrary Node.FS objects, is no longer used
+ # by SCons itself. It was introduced to store dependency paths
+ # in .sconsign files relative to the target, but that ended up
+ # being significantly inefficient.
+ #
+ # We're continuing to support the method because some SConstruct
+ # files out there started using it when it was available, and
+ # we're all about backwards compatibility.
+
+ try:
+ memo_dict = self._memo['rel_path']
+ except KeyError:
+ memo_dict = {}
+ self._memo['rel_path'] = memo_dict
+ else:
+ try:
+ return memo_dict[other]
+ except KeyError:
+ pass
+
+ if self is other:
+ result = '.'
+
+ elif not other in self.path_elements:
+ try:
+ other_dir = other.get_dir()
+ except AttributeError:
+ result = str(other)
+ else:
+ if other_dir is None:
+ result = other.name
+ else:
+ dir_rel_path = self.rel_path(other_dir)
+ if dir_rel_path == '.':
+ result = other.name
+ else:
+ result = dir_rel_path + os.sep + other.name
+ else:
+ i = self.path_elements.index(other) + 1
+
+ path_elems = ['..'] * (len(self.path_elements) - i) \
+ + map(lambda n: n.name, other.path_elements[i:])
+
+ result = string.join(path_elems, os.sep)
+
+ memo_dict[other] = result
+
+ return result
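+ # Illustrative only (not part of SCons): with hypothetical nodes
+ # /top/a/b (a Dir) and /top/c/d.h (a File),
+ #
+ # fs.Dir('/top/a/b').rel_path(fs.File('/top/c/d.h'))
+ #
+ # returns '../../c/d.h' (joined with os.sep on the local platform).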
+
+ def get_env_scanner(self, env, kw={}):
+ import SCons.Defaults
+ return SCons.Defaults.DirEntryScanner
+
+ def get_target_scanner(self):
+ import SCons.Defaults
+ return SCons.Defaults.DirEntryScanner
+
+ def get_found_includes(self, env, scanner, path):
+ """Return this directory's implicit dependencies.
+
+ We don't bother caching the results because the scan typically
+ shouldn't be requested more than once (as opposed to scanning
+ .h file contents, which can be requested as many times as the
+ file is #included by other files).
+ """
+ if not scanner:
+ return []
+ # Clear cached info for this Dir. If we already visited this
+ # directory on our walk down the tree (because we didn't know at
+ # that point it was being used as the source for another Node)
+ # then we may have calculated build signature before realizing
+ # we had to scan the disk. Now that we have to, though, we need
+ # to invalidate the old calculated signature so that any node
+ # dependent on our directory structure gets one that includes
+ # info about everything on disk.
+ self.clear()
+ return scanner(self, env, path)
+
+ #
+ # Taskmaster interface subsystem
+ #
+
+ def prepare(self):
+ pass
+
+ def build(self, **kw):
+ """A null "builder" for directories."""
+ global MkdirBuilder
+ if self.builder is not MkdirBuilder:
+ apply(SCons.Node.Node.build, [self,], kw)
+
+ #
+ #
+ #
+
+ def _create(self):
+ """Create this directory, silently and without worrying about
+ whether the builder is the default or not."""
+ listDirs = []
+ parent = self
+ while parent:
+ if parent.exists():
+ break
+ listDirs.append(parent)
+ parent = parent.up()
+ else:
+ raise SCons.Errors.StopError, parent.path
+ listDirs.reverse()
+ for dirnode in listDirs:
+ try:
+ # Don't call dirnode.build(), call the base Node method
+ # directly because we definitely *must* create this
+ # directory. The dirnode.build() method will suppress
+ # the build if it's the default builder.
+ SCons.Node.Node.build(dirnode)
+ dirnode.get_executor().nullify()
+ # The build() action may or may not have actually
+ # created the directory, depending on whether the -n
+ # option was used or not. Delete the _exists and
+ # _rexists attributes so they can be reevaluated.
+ dirnode.clear()
+ except OSError:
+ pass
+
+ def multiple_side_effect_has_builder(self):
+ global MkdirBuilder
+ return self.builder is not MkdirBuilder and self.has_builder()
+
+ def alter_targets(self):
+ """Return any corresponding targets in a variant directory.
+ """
+ return self.fs.variant_dir_target_climb(self, self, [])
+
+ def scanner_key(self):
+ """A directory does not get scanned."""
+ return None
+
+ def get_contents(self):
+ """Return content signatures and names of all our children
+ separated by new-lines. Ensure that the nodes are sorted."""
+ contents = []
+ name_cmp = lambda a, b: cmp(a.name, b.name)
+ sorted_children = self.children()[:]
+ sorted_children.sort(name_cmp)
+ for node in sorted_children:
+ contents.append('%s %s\n' % (node.get_csig(), node.name))
+ return string.join(contents, '')
+
+ def get_csig(self):
+ """Compute the content signature for Directory nodes. In
+ general, this is not needed and the content signature is not
+ stored in the DirNodeInfo. However, if get_contents on a Dir
+ node is called which has a child directory, the child
+ directory should return the hash of its contents."""
+ contents = self.get_contents()
+ return SCons.Util.MD5signature(contents)
+
+ def do_duplicate(self, src):
+ pass
+
+ changed_since_last_build = SCons.Node.Node.state_has_changed
+
+ def is_up_to_date(self):
+ """If any child is not up-to-date, then this directory isn't,
+ either."""
+ if self.builder is not MkdirBuilder and not self.exists():
+ return 0
+ up_to_date = SCons.Node.up_to_date
+ for kid in self.children():
+ if kid.get_state() > up_to_date:
+ return 0
+ return 1
+
+ def rdir(self):
+ if not self.exists():
+ norm_name = _my_normcase(self.name)
+ for dir in self.dir.get_all_rdirs():
+ try: node = dir.entries[norm_name]
+ except KeyError: node = dir.dir_on_disk(self.name)
+ if node and node.exists() and \
+ (isinstance(dir, Dir) or isinstance(dir, Entry)):
+ return node
+ return self
+
+ def sconsign(self):
+ """Return the .sconsign file info for this directory,
+ creating it first if necessary."""
+ if not self._sconsign:
+ import SCons.SConsign
+ self._sconsign = SCons.SConsign.ForDirectory(self)
+ return self._sconsign
+
+ def srcnode(self):
+ """Dir has a special need for srcnode()...if we
+ have a srcdir attribute set, then that *is* our srcnode."""
+ if self.srcdir:
+ return self.srcdir
+ return Base.srcnode(self)
+
+ def get_timestamp(self):
+ """Return the latest timestamp from among our children"""
+ stamp = 0
+ for kid in self.children():
+ if kid.get_timestamp() > stamp:
+ stamp = kid.get_timestamp()
+ return stamp
+
+ def entry_abspath(self, name):
+ return self.abspath + os.sep + name
+
+ def entry_labspath(self, name):
+ return self.labspath + '/' + name
+
+ def entry_path(self, name):
+ return self.path + os.sep + name
+
+ def entry_tpath(self, name):
+ return self.tpath + os.sep + name
+
+ def entry_exists_on_disk(self, name):
+ try:
+ d = self.on_disk_entries
+ except AttributeError:
+ d = {}
+ try:
+ entries = os.listdir(self.abspath)
+ except OSError:
+ pass
+ else:
+ for entry in map(_my_normcase, entries):
+ d[entry] = 1
+ self.on_disk_entries = d
+ return d.has_key(_my_normcase(name))
+
+ memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list'))
+
+ def srcdir_list(self):
+ try:
+ return self._memo['srcdir_list']
+ except KeyError:
+ pass
+
+ result = []
+
+ dirname = '.'
+ dir = self
+ while dir:
+ if dir.srcdir:
+ result.append(dir.srcdir.Dir(dirname))
+ dirname = dir.name + os.sep + dirname
+ dir = dir.up()
+
+ self._memo['srcdir_list'] = result
+
+ return result
+
+ def srcdir_duplicate(self, name):
+ for dir in self.srcdir_list():
+ if self.is_under(dir):
+ # We shouldn't source from something in the build path;
+ # variant_dir is probably under src_dir, in which case
+ # we are reflecting.
+ break
+ if dir.entry_exists_on_disk(name):
+ srcnode = dir.Entry(name).disambiguate()
+ if self.duplicate:
+ node = self.Entry(name).disambiguate()
+ node.do_duplicate(srcnode)
+ return node
+ else:
+ return srcnode
+ return None
+
+ def _srcdir_find_file_key(self, filename):
+ return filename
+
+ memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key))
+
+ def srcdir_find_file(self, filename):
+ try:
+ memo_dict = self._memo['srcdir_find_file']
+ except KeyError:
+ memo_dict = {}
+ self._memo['srcdir_find_file'] = memo_dict
+ else:
+ try:
+ return memo_dict[filename]
+ except KeyError:
+ pass
+
+ def func(node):
+ if (isinstance(node, File) or isinstance(node, Entry)) and \
+ (node.is_derived() or node.exists()):
+ return node
+ return None
+
+ norm_name = _my_normcase(filename)
+
+ for rdir in self.get_all_rdirs():
+ try: node = rdir.entries[norm_name]
+ except KeyError: node = rdir.file_on_disk(filename)
+ else: node = func(node)
+ if node:
+ result = (node, self)
+ memo_dict[filename] = result
+ return result
+
+ for srcdir in self.srcdir_list():
+ for rdir in srcdir.get_all_rdirs():
+ try: node = rdir.entries[norm_name]
+ except KeyError: node = rdir.file_on_disk(filename)
+ else: node = func(node)
+ if node:
+ result = (File(filename, self, self.fs), srcdir)
+ memo_dict[filename] = result
+ return result
+
+ result = (None, None)
+ memo_dict[filename] = result
+ return result
+
+ def dir_on_disk(self, name):
+ if self.entry_exists_on_disk(name):
+ try: return self.Dir(name)
+ except TypeError: pass
+ node = self.srcdir_duplicate(name)
+ if isinstance(node, File):
+ return None
+ return node
+
+ def file_on_disk(self, name):
+ if self.entry_exists_on_disk(name) or \
+ diskcheck_rcs(self, name) or \
+ diskcheck_sccs(self, name):
+ try: return self.File(name)
+ except TypeError: pass
+ node = self.srcdir_duplicate(name)
+ if isinstance(node, Dir):
+ return None
+ return node
+
+ def walk(self, func, arg):
+ """
+ Walk this directory tree by calling the specified function
+ for each directory in the tree.
+
+ This behaves like the os.path.walk() function, but for in-memory
+ Node.FS.Dir objects. The function takes the same arguments as
+ the functions passed to os.path.walk():
+
+ func(arg, dirname, fnames)
+
+ Except that "dirname" will actually be the directory *Node*,
+ not the string. The '.' and '..' entries are excluded from
+ fnames. The fnames list may be modified in-place to filter the
+ subdirectories visited or otherwise impose a specific order.
+ The "arg" argument is always passed to func() and may be used
+ in any way (or ignored, passing None is common).
+ """
+ entries = self.entries
+ names = entries.keys()
+ names.remove('.')
+ names.remove('..')
+ func(arg, self, names)
+ select_dirs = lambda n, e=entries: isinstance(e[n], Dir)
+ for dirname in filter(select_dirs, names):
+ entries[dirname].walk(func, arg)
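+ # Illustrative only (not part of SCons): counting in-memory entries
+ # with a hypothetical callback, in the style of os.path.walk().
+ #
+ # def count(totals, dirnode, names):
+ #     totals[str(dirnode)] = len(names)
+ # totals = {}
+ # fs.Top.walk(count, totals)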
+
+ def glob(self, pathname, ondisk=True, source=False, strings=False):
+ """
+ Returns a list of Nodes (or strings) matching a specified
+ pathname pattern.
+
+ Pathname patterns follow UNIX shell semantics: * matches
+ any-length strings of any characters, ? matches any character,
+ and [] can enclose lists or ranges of characters. Matches do
+ not span directory separators.
+
+ The matches take into account Repositories, returning local
+ Nodes if a corresponding entry exists in a Repository (either
+ an in-memory Node or something on disk).
+
+ By default, the glob() function matches entries that exist
+ on-disk, in addition to in-memory Nodes. Setting the "ondisk"
+ argument to False (or some other non-true value) causes the glob()
+ function to only match in-memory Nodes. The default behavior is
+ to return both the on-disk and in-memory Nodes.
+
+ The "source" argument, when true, specifies that corresponding
+ source Nodes must be returned if you're globbing in a build
+ directory (initialized with VariantDir()). The default behavior
+ is to return Nodes local to the VariantDir().
+
+ The "strings" argument, when true, returns the matches as strings,
+ not Nodes. The strings are path names relative to this directory.
+
+ The underlying algorithm is adapted from the glob.glob() function
+ in the Python library (but heavily modified), and uses fnmatch()
+ under the covers.
+ """
+ dirname, basename = os.path.split(pathname)
+ if not dirname:
+ return self._glob1(basename, ondisk, source, strings)
+ if has_glob_magic(dirname):
+ list = self.glob(dirname, ondisk, source, strings=False)
+ else:
+ list = [self.Dir(dirname, create=True)]
+ result = []
+ for dir in list:
+ r = dir._glob1(basename, ondisk, source, strings)
+ if strings:
+ r = map(lambda x, d=str(dir): os.path.join(d, x), r)
+ result.extend(r)
+ result.sort(lambda a, b: cmp(str(a), str(b)))
+ return result
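+ # Illustrative only (not part of SCons): typical calls against a
+ # hypothetical Dir node.
+ #
+ # nodes = some_dir.glob('*.c')                    # Nodes (on-disk and in-memory)
+ # names = some_dir.glob('src/*.h', strings=True)  # strings instead of Nodes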
+
+ def _glob1(self, pattern, ondisk=True, source=False, strings=False):
+ """
+ Globs for and returns a list of entry names matching a single
+ pattern in this directory.
+
+ This searches any repositories and source directories for
+ corresponding entries and returns a Node (or string) relative
+ to the current directory if an entry is found anywhere.
+
+ TODO: handle pattern with no wildcard
+ """
+ search_dir_list = self.get_all_rdirs()
+ for srcdir in self.srcdir_list():
+ search_dir_list.extend(srcdir.get_all_rdirs())
+
+ selfEntry = self.Entry
+ names = []
+ for dir in search_dir_list:
+ # We use the .name attribute from the Node because the keys of
+ # the dir.entries dictionary are normalized (that is, all upper
+ # case) on case-insensitive systems like Windows.
+ #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ]
+ entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys())
+ node_names = map(lambda n, e=dir.entries: e[n].name, entry_names)
+ names.extend(node_names)
+ if not strings:
+ # Make sure the working directory (self) actually has
+ # entries for all Nodes in repositories or variant dirs.
+ map(selfEntry, node_names)
+ if ondisk:
+ try:
+ disk_names = os.listdir(dir.abspath)
+ except os.error:
+ continue
+ names.extend(disk_names)
+ if not strings:
+ # We're going to return corresponding Nodes in
+ # the local directory, so we need to make sure
+ # those Nodes exist. We only want to create
+ # Nodes for the entries that will match the
+ # specified pattern, though, which means we
+ # need to filter the list here, even though
+ # the overall list will also be filtered later,
+ # after we exit this loop.
+ if pattern[0] != '.':
+ #disk_names = [ d for d in disk_names if d[0] != '.' ]
+ disk_names = filter(lambda x: x[0] != '.', disk_names)
+ disk_names = fnmatch.filter(disk_names, pattern)
+ dirEntry = dir.Entry
+ for name in disk_names:
+ # Add './' before disk filename so that '#' at
+ # beginning of filename isn't interpreted.
+ name = './' + name
+ node = dirEntry(name).disambiguate()
+ n = selfEntry(name)
+ if n.__class__ != node.__class__:
+ n.__class__ = node.__class__
+ n._morph()
+
+ names = set(names)
+ if pattern[0] != '.':
+ #names = [ n for n in names if n[0] != '.' ]
+ names = filter(lambda x: x[0] != '.', names)
+ names = fnmatch.filter(names, pattern)
+
+ if strings:
+ return names
+
+ #return [ self.entries[_my_normcase(n)] for n in names ]
+ return map(lambda n, e=self.entries: e[_my_normcase(n)], names)
+
+class RootDir(Dir):
+ """A class for the root directory of a file system.
+
+ This is the same as a Dir class, except that the path separator
+ ('/' or '\\') is actually part of the name, so we don't need to
+ add a separator when creating the path names of entries within
+ this directory.
+ """
+ def __init__(self, name, fs):
+ if __debug__: logInstanceCreation(self, 'Node.FS.RootDir')
+ # We're going to be our own parent directory (".." entry and .dir
+ # attribute) so we have to set up some values so Base.__init__()
+ # won't gag when it calls some of our methods.
+ self.abspath = ''
+ self.labspath = ''
+ self.path = ''
+ self.tpath = ''
+ self.path_elements = []
+ self.duplicate = 0
+ self.root = self
+ Base.__init__(self, name, self, fs)
+
+ # Now set our paths to what we really want them to be: the
+ # initial drive letter (the name) plus the directory separator,
+ # except for the "lookup abspath," which does not have the
+ # drive letter.
+ self.abspath = name + os.sep
+ self.labspath = ''
+ self.path = name + os.sep
+ self.tpath = name + os.sep
+ self._morph()
+
+ self._lookupDict = {}
+
+ # The // and os.sep + os.sep entries are necessary because
+ # os.path.normpath() seems to preserve double slashes at the
+ # beginning of a path (presumably for UNC path names), but
+ # collapses triple slashes to a single slash.
+ self._lookupDict[''] = self
+ self._lookupDict['/'] = self
+ self._lookupDict['//'] = self
+ self._lookupDict[os.sep] = self
+ self._lookupDict[os.sep + os.sep] = self
+
+ def must_be_same(self, klass):
+ if klass is Dir:
+ return
+ Base.must_be_same(self, klass)
+
+ def _lookup_abs(self, p, klass, create=1):
+ """
+ Fast (?) lookup of a *normalized* absolute path.
+
+ This method is intended for use by internal lookups with
+ already-normalized path data. For general-purpose lookups,
+ use the FS.Entry(), FS.Dir() or FS.File() methods.
+
+ The caller is responsible for making sure we're passed a
+ normalized absolute path; we merely let Python's dictionary look
+ up and return the One True Node.FS object for the path.
+
+ If a Node for the specified "p" doesn't already exist, and
+ "create" is specified, the Node may be created after recursive
+ invocation to find or create the parent directory or directories.
+ """
+ k = _my_normcase(p)
+ try:
+ result = self._lookupDict[k]
+ except KeyError:
+ if not create:
+ raise SCons.Errors.UserError
+ # There is no Node for this path name, and we're allowed
+ # to create it.
+ dir_name, file_name = os.path.split(p)
+ dir_node = self._lookup_abs(dir_name, Dir)
+ result = klass(file_name, dir_node, self.fs)
+
+ # Double-check on disk (as configured) that the Node we
+ # created matches whatever is out there in the real world.
+ result.diskcheck_match()
+
+ self._lookupDict[k] = result
+ dir_node.entries[_my_normcase(file_name)] = result
+ dir_node.implicit = None
+ else:
+ # There is already a Node for this path name. Allow it to
+ # complain if we were looking for an inappropriate type.
+ result.must_be_same(klass)
+ return result
+
+ def __str__(self):
+ return self.abspath
+
+ def entry_abspath(self, name):
+ return self.abspath + name
+
+ def entry_labspath(self, name):
+ return '/' + name
+
+ def entry_path(self, name):
+ return self.path + name
+
+ def entry_tpath(self, name):
+ return self.tpath + name
+
+ def is_under(self, dir):
+ if self is dir:
+ return 1
+ else:
+ return 0
+
+ def up(self):
+ return None
+
+ def get_dir(self):
+ return None
+
+ def src_builder(self):
+ return _null
+
+class FileNodeInfo(SCons.Node.NodeInfoBase):
+ current_version_id = 1
+
+ field_list = ['csig', 'timestamp', 'size']
+
+ # This should get reset by the FS initialization.
+ fs = None
+
+ def str_to_node(self, s):
+ top = self.fs.Top
+ root = top.root
+ if do_splitdrive:
+ drive, s = os.path.splitdrive(s)
+ if drive:
+ root = self.fs.get_root(drive)
+ if not os.path.isabs(s):
+ s = top.labspath + '/' + s
+ return root._lookup_abs(s, Entry)
+
+class FileBuildInfo(SCons.Node.BuildInfoBase):
+ current_version_id = 1
+
+ def convert_to_sconsign(self):
+ """
+ Converts this FileBuildInfo object for writing to a .sconsign file
+
+ This replaces each Node in our various dependency lists with its
+ usual string representation: relative to the top-level SConstruct
+ directory, or an absolute path if it's outside.
+ """
+ if os.sep == '/':
+ node_to_str = str
+ else:
+ def node_to_str(n):
+ try:
+ s = n.path
+ except AttributeError:
+ s = str(n)
+ else:
+ s = string.replace(s, os.sep, '/')
+ return s
+ for attr in ['bsources', 'bdepends', 'bimplicit']:
+ try:
+ val = getattr(self, attr)
+ except AttributeError:
+ pass
+ else:
+ setattr(self, attr, map(node_to_str, val))
+ def convert_from_sconsign(self, dir, name):
+ """
+ Converts a newly-read FileBuildInfo object for in-SCons use
+
+ For normal up-to-date checking, we don't have any conversion to
+ perform--but we're leaving this method here to make that clear.
+ """
+ pass
+ def prepare_dependencies(self):
+ """
+ Prepares a FileBuildInfo object for explaining what changed
+
+ The bsources, bdepends and bimplicit lists have all been
+ stored on disk as paths relative to the top-level SConstruct
+ directory. Convert the strings to actual Nodes (for use by the
+ --debug=explain code and --implicit-cache).
+ """
+ attrs = [
+ ('bsources', 'bsourcesigs'),
+ ('bdepends', 'bdependsigs'),
+ ('bimplicit', 'bimplicitsigs'),
+ ]
+ for (nattr, sattr) in attrs:
+ try:
+ strings = getattr(self, nattr)
+ nodeinfos = getattr(self, sattr)
+ except AttributeError:
+ continue
+ nodes = []
+ for s, ni in izip(strings, nodeinfos):
+ if not isinstance(s, SCons.Node.Node):
+ s = ni.str_to_node(s)
+ nodes.append(s)
+ setattr(self, nattr, nodes)
+ def format(self, names=0):
+ result = []
+ bkids = self.bsources + self.bdepends + self.bimplicit
+ bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
+ for bkid, bkidsig in izip(bkids, bkidsigs):
+ result.append(str(bkid) + ': ' +
+ string.join(bkidsig.format(names=names), ' '))
+ result.append('%s [%s]' % (self.bactsig, self.bact))
+ return string.join(result, '\n')
+
+class File(Base):
+ """A class for files in a file system.
+ """
+
+ memoizer_counters = []
+
+ NodeInfo = FileNodeInfo
+ BuildInfo = FileBuildInfo
+
+ # Chunk size, in kilobytes, used when hashing file contents
+ # incrementally (see get_content_hash() below).
+ md5_chunksize = 64
+
+ def diskcheck_match(self):
+ diskcheck_match(self, self.isdir,
+ "Directory %s found where file expected.")
+
+ def __init__(self, name, directory, fs):
+ if __debug__: logInstanceCreation(self, 'Node.FS.File')
+ Base.__init__(self, name, directory, fs)
+ self._morph()
+
+ def Entry(self, name):
+ """Create an entry node named 'name' relative to
+ the directory of this file."""
+ return self.dir.Entry(name)
+
+ def Dir(self, name, create=True):
+ """Create a directory node named 'name' relative to
+ the directory of this file."""
+ return self.dir.Dir(name, create=create)
+
+ def Dirs(self, pathlist):
+ """Create a list of directories relative to the SConscript
+ directory of this file."""
+ # TODO(1.5)
+ # return [self.Dir(p) for p in pathlist]
+ return map(lambda p, s=self: s.Dir(p), pathlist)
+
+ def File(self, name):
+ """Create a file node named 'name' relative to
+ the directory of this file."""
+ return self.dir.File(name)
+
+ #def generate_build_dict(self):
+ # """Return an appropriate dictionary of values for building
+ # this File."""
+ # return {'Dir' : self.Dir,
+ # 'File' : self.File,
+ # 'RDirs' : self.RDirs}
+
+ def _morph(self):
+ """Turn a file system node into a File object."""
+ self.scanner_paths = {}
+ if not hasattr(self, '_local'):
+ self._local = 0
+
+ # If there was already a Builder set on this entry, then
+ # we need to make sure we call the target-decider function,
+ # not the source-decider. Reaching in and doing this by hand
+ # is a little bogus. We'd prefer to handle this by adding
+ # an Entry.builder_set() method that disambiguates like the
+ # other methods, but that starts running into problems with the
+ # fragile way we initialize Dir Nodes with their Mkdir builders,
+ # yet still allow them to be overridden by the user. Since it's
+ # not clear right now how to fix that, stick with what works
+ # until it becomes clear...
+ if self.has_builder():
+ self.changed_since_last_build = self.decide_target
+
+ def scanner_key(self):
+ return self.get_suffix()
+
+ def get_contents(self):
+ if not self.rexists():
+ return ''
+ fname = self.rfile().abspath
+ try:
+ r = open(fname, "rb").read()
+ except EnvironmentError, e:
+ if not e.filename:
+ e.filename = fname
+ raise
+ return r
+
+ def get_content_hash(self):
+ """
+ Compute and return the MD5 hash for this file.
+ """
+ if not self.rexists():
+ return SCons.Util.MD5signature('')
+ fname = self.rfile().abspath
+ try:
+ cs = SCons.Util.MD5filesignature(fname,
+ chunksize=SCons.Node.FS.File.md5_chunksize*1024)
+ except EnvironmentError, e:
+ if not e.filename:
+ e.filename = fname
+ raise
+ return cs
+
+
+ memoizer_counters.append(SCons.Memoize.CountValue('get_size'))
+
+ def get_size(self):
+ try:
+ return self._memo['get_size']
+ except KeyError:
+ pass
+
+ if self.rexists():
+ size = self.rfile().getsize()
+ else:
+ size = 0
+
+ self._memo['get_size'] = size
+
+ return size
+
+ memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp'))
+
+ def get_timestamp(self):
+ try:
+ return self._memo['get_timestamp']
+ except KeyError:
+ pass
+
+ if self.rexists():
+ timestamp = self.rfile().getmtime()
+ else:
+ timestamp = 0
+
+ self._memo['get_timestamp'] = timestamp
+
+ return timestamp
+
+ def store_info(self):
+ # Merge our build information into the already-stored entry.
+ # This accommodates "chained builds" where a file that's a target
+ # in one build (SConstruct file) is a source in a different build.
+ # See test/chained-build.py for the use case.
+ if do_store_info:
+ self.dir.sconsign().store_info(self.name, self)
+
+ convert_copy_attrs = [
+ 'bsources',
+ 'bimplicit',
+ 'bdepends',
+ 'bact',
+ 'bactsig',
+ 'ninfo',
+ ]
+
+
+ convert_sig_attrs = [
+ 'bsourcesigs',
+ 'bimplicitsigs',
+ 'bdependsigs',
+ ]
+
+ def convert_old_entry(self, old_entry):
+ # Convert a .sconsign entry from before the Big Signature
+ # Refactoring, doing what we can to convert its information
+ # to the new .sconsign entry format.
+ #
+ # The old format looked essentially like this:
+ #
+ # BuildInfo
+ # .ninfo (NodeInfo)
+ # .bsig
+ # .csig
+ # .timestamp
+ # .size
+ # .bsources
+ # .bsourcesigs ("signature" list)
+ # .bdepends
+ # .bdependsigs ("signature" list)
+ # .bimplicit
+ # .bimplicitsigs ("signature" list)
+ # .bact
+ # .bactsig
+ #
+ # The new format looks like this:
+ #
+ # .ninfo (NodeInfo)
+ # .bsig
+ # .csig
+ # .timestamp
+ # .size
+ # .binfo (BuildInfo)
+ # .bsources
+ # .bsourcesigs (NodeInfo list)
+ # .bsig
+ # .csig
+ # .timestamp
+ # .size
+ # .bdepends
+ # .bdependsigs (NodeInfo list)
+ # .bsig
+ # .csig
+ # .timestamp
+ # .size
+ # .bimplicit
+ # .bimplicitsigs (NodeInfo list)
+ # .bsig
+ # .csig
+ # .timestamp
+ # .size
+ # .bact
+ # .bactsig
+ #
+ # The basic idea of the new structure is that a NodeInfo always
+ # holds all available information about the state of a given Node
+ # at a certain point in time. The various .b*sigs lists can just
+ # be a list of pointers to the .ninfo attributes of the different
+ # dependent nodes, without any copying of information until it's
+ # time to pickle it for writing out to a .sconsign file.
+ #
+ # The complicating issue is that the *old* format only stored one
+ # "signature" per dependency, based on however the *last* build
+ # was configured. We don't know from just looking at it whether
+ # it was a build signature, a content signature, or a timestamp
+ # "signature". Since we no longer use build signatures, the
+ # best we can do is look at the length and if it's thirty two,
+ # assume that it was (or might have been) a content signature.
+ # If it was actually a build signature, then it will cause a
+ # rebuild anyway when it doesn't match the new content signature,
+ # but that's probably the best we can do.
+ import SCons.SConsign
+ new_entry = SCons.SConsign.SConsignEntry()
+ new_entry.binfo = self.new_binfo()
+ binfo = new_entry.binfo
+ for attr in self.convert_copy_attrs:
+ try:
+ value = getattr(old_entry, attr)
+ except AttributeError:
+ continue
+ setattr(binfo, attr, value)
+ delattr(old_entry, attr)
+ for attr in self.convert_sig_attrs:
+ try:
+ sig_list = getattr(old_entry, attr)
+ except AttributeError:
+ continue
+ value = []
+ for sig in sig_list:
+ ninfo = self.new_ninfo()
+ if len(sig) == 32:
+ ninfo.csig = sig
+ else:
+ ninfo.timestamp = sig
+ value.append(ninfo)
+ setattr(binfo, attr, value)
+ delattr(old_entry, attr)
+ return new_entry
+
+ memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info'))
+
+ def get_stored_info(self):
+ try:
+ return self._memo['get_stored_info']
+ except KeyError:
+ pass
+
+ try:
+ sconsign_entry = self.dir.sconsign().get_entry(self.name)
+ except (KeyError, EnvironmentError):
+ import SCons.SConsign
+ sconsign_entry = SCons.SConsign.SConsignEntry()
+ sconsign_entry.binfo = self.new_binfo()
+ sconsign_entry.ninfo = self.new_ninfo()
+ else:
+ if isinstance(sconsign_entry, FileBuildInfo):
+ # This is a .sconsign file from before the Big Signature
+ # Refactoring; convert it as best we can.
+ sconsign_entry = self.convert_old_entry(sconsign_entry)
+ try:
+ delattr(sconsign_entry.ninfo, 'bsig')
+ except AttributeError:
+ pass
+
+ self._memo['get_stored_info'] = sconsign_entry
+
+ return sconsign_entry
+
+ def get_stored_implicit(self):
+ binfo = self.get_stored_info().binfo
+ binfo.prepare_dependencies()
+ try: return binfo.bimplicit
+ except AttributeError: return None
+
+ def rel_path(self, other):
+ return self.dir.rel_path(other)
+
+ def _get_found_includes_key(self, env, scanner, path):
+ return (id(env), id(scanner), path)
+
+ memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key))
+
+ def get_found_includes(self, env, scanner, path):
+ """Return the included implicit dependencies in this file.
+ Cache results so we only scan the file once per path
+ regardless of how many times this information is requested.
+ """
+ memo_key = (id(env), id(scanner), path)
+ try:
+ memo_dict = self._memo['get_found_includes']
+ except KeyError:
+ memo_dict = {}
+ self._memo['get_found_includes'] = memo_dict
+ else:
+ try:
+ return memo_dict[memo_key]
+ except KeyError:
+ pass
+
+ if scanner:
+ # result = [n.disambiguate() for n in scanner(self, env, path)]
+ result = scanner(self, env, path)
+ result = map(lambda N: N.disambiguate(), result)
+ else:
+ result = []
+
+ memo_dict[memo_key] = result
+
+ return result
+
+ def _createDir(self):
+ # ensure that the directories for this node are
+ # created.
+ self.dir._create()
+
+ def retrieve_from_cache(self):
+ """Try to retrieve the node's content from a cache
+
+ This method is called from multiple threads in a parallel build,
+ so only do thread safe stuff here. Do thread unsafe stuff in
+ built().
+
+ Returns true iff the node was successfully retrieved.
+ """
+ if self.nocache:
+ return None
+ if not self.is_derived():
+ return None
+ return self.get_build_env().get_CacheDir().retrieve(self)
+
+ def built(self):
+ """
+ Called just after this node is successfully built.
+ """
+ # Push this file out to cache before the superclass Node.built()
+ # method has a chance to clear the build signature, which it
+ # will do if this file has a source scanner.
+ #
+ # We have to clear the memoized values *before* we push it to
+ # cache so that the memoization of the self.exists() return
+ # value doesn't interfere.
+ self.clear_memoized_values()
+ if self.exists():
+ self.get_build_env().get_CacheDir().push(self)
+ SCons.Node.Node.built(self)
+
+ def visited(self):
+ if self.exists():
+ self.get_build_env().get_CacheDir().push_if_forced(self)
+
+ ninfo = self.get_ninfo()
+
+ csig = self.get_max_drift_csig()
+ if csig:
+ ninfo.csig = csig
+
+ ninfo.timestamp = self.get_timestamp()
+ ninfo.size = self.get_size()
+
+ if not self.has_builder():
+ # This is a source file, but it might have been a target file
+ # in another build that included more of the DAG. Copy
+ # any build information that's stored in the .sconsign file
+ # into our binfo object so it doesn't get lost.
+ old = self.get_stored_info()
+ self.get_binfo().__dict__.update(old.binfo.__dict__)
+
+ self.store_info()
+
+ def find_src_builder(self):
+ if self.rexists():
+ return None
+ scb = self.dir.src_builder()
+ if scb is _null:
+ if diskcheck_sccs(self.dir, self.name):
+ scb = get_DefaultSCCSBuilder()
+ elif diskcheck_rcs(self.dir, self.name):
+ scb = get_DefaultRCSBuilder()
+ else:
+ scb = None
+ if scb is not None:
+ try:
+ b = self.builder
+ except AttributeError:
+ b = None
+ if b is None:
+ self.builder_set(scb)
+ return scb
+
+ def has_src_builder(self):
+ """Return whether this Node has a source builder or not.
+
+ If this Node doesn't have an explicit source code builder, this
+ is where we figure out, on the fly, if there's a transparent
+ source code builder for it.
+
+ Note that if we found a source builder, we also set the
+ self.builder attribute, so that all of the methods that actually
+ *build* this file don't have to do anything different.
+ """
+ try:
+ scb = self.sbuilder
+ except AttributeError:
+ scb = self.sbuilder = self.find_src_builder()
+ return scb is not None
+
+ def alter_targets(self):
+ """Return any corresponding targets in a variant directory.
+ """
+ if self.is_derived():
+ return [], None
+ return self.fs.variant_dir_target_climb(self, self.dir, [self.name])
+
+ def _rmv_existing(self):
+ self.clear_memoized_values()
+ e = Unlink(self, [], None)
+ if isinstance(e, SCons.Errors.BuildError):
+ raise e
+
+ #
+ # Taskmaster interface subsystem
+ #
+
+ def make_ready(self):
+ self.has_src_builder()
+ self.get_binfo()
+
+ def prepare(self):
+ """Prepare for this file to be created."""
+ SCons.Node.Node.prepare(self)
+
+ if self.get_state() != SCons.Node.up_to_date:
+ if self.exists():
+ if self.is_derived() and not self.precious:
+ self._rmv_existing()
+ else:
+ try:
+ self._createDir()
+ except SCons.Errors.StopError, drive:
+ desc = "No drive `%s' for target `%s'." % (drive, self)
+ raise SCons.Errors.StopError, desc
+
+ #
+ #
+ #
+
+ def remove(self):
+ """Remove this file."""
+ if self.exists() or self.islink():
+ self.fs.unlink(self.path)
+ return 1
+ return None
+
+ def do_duplicate(self, src):
+ self._createDir()
+ Unlink(self, None, None)
+ e = Link(self, src, None)
+ if isinstance(e, SCons.Errors.BuildError):
+ desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
+ raise SCons.Errors.StopError, desc
+ self.linked = 1
+ # The Link() action may or may not have actually
+ # created the file, depending on whether the -n
+ # option was used or not. Delete the _exists and
+ # _rexists attributes so they can be reevaluated.
+ self.clear()
+
+ memoizer_counters.append(SCons.Memoize.CountValue('exists'))
+
+ def exists(self):
+ try:
+ return self._memo['exists']
+ except KeyError:
+ pass
+ # Duplicate from source path if we are set up to do this.
+ if self.duplicate and not self.is_derived() and not self.linked:
+ src = self.srcnode()
+ if src is not self:
+ # At this point, src is meant to be copied in a variant directory.
+ src = src.rfile()
+ if src.abspath != self.abspath:
+ if src.exists():
+ self.do_duplicate(src)
+ # Can't return 1 here because the duplication might
+ # not actually occur if the -n option is being used.
+ else:
+ # The source file does not exist. Make sure no old
+ # copy remains in the variant directory.
+ if Base.exists(self) or self.islink():
+ self.fs.unlink(self.path)
+ # Return None explicitly because the Base.exists() call
+ # above will have cached its value if the file existed.
+ self._memo['exists'] = None
+ return None
+ result = Base.exists(self)
+ self._memo['exists'] = result
+ return result
+
+ #
+ # SIGNATURE SUBSYSTEM
+ #
+
+ def get_max_drift_csig(self):
+ """
+ Returns the content signature currently stored for this node
+ if it's been unmodified longer than the max_drift value, or the
+ max_drift value is 0. Returns None otherwise.
+ """
+ old = self.get_stored_info()
+ mtime = self.get_timestamp()
+
+ max_drift = self.fs.max_drift
+ if max_drift > 0:
+ if (time.time() - mtime) > max_drift:
+ try:
+ n = old.ninfo
+ if n.timestamp and n.csig and n.timestamp == mtime:
+ return n.csig
+ except AttributeError:
+ pass
+ elif max_drift == 0:
+ try:
+ return old.ninfo.csig
+ except AttributeError:
+ pass
+
+ return None
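+ # Illustrative only (not part of SCons): how max_drift (see
+ # FS.set_max_drift() above) feeds into this check.
+ #
+ # fs.set_max_drift(0)    # always trust the stored csig
+ # fs.set_max_drift(-1)   # never trust it; always re-hash the content
+ #
+ # With the default (two days), the stored csig is reused only when the
+ # stored timestamp matches and the file is older than max_drift seconds.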
+
+ def get_csig(self):
+ """
+ Generate a node's content signature, the digested signature
+ of its content.
+
+ node - the node
+ cache - alternate node to use for the signature cache
+ returns - the content signature
+ """
+ ninfo = self.get_ninfo()
+ try:
+ return ninfo.csig
+ except AttributeError:
+ pass
+
+ csig = self.get_max_drift_csig()
+ if csig is None:
+
+ try:
+ if self.get_size() < SCons.Node.FS.File.md5_chunksize:
+ contents = self.get_contents()
+ else:
+ csig = self.get_content_hash()
+ except IOError:
+ # This can happen if there's actually a directory on-disk,
+ # which can be the case if they've disabled disk checks,
+ # or if an action with a File target actually happens to
+ # create a same-named directory by mistake.
+ csig = ''
+ else:
+ if not csig:
+ csig = SCons.Util.MD5signature(contents)
+
+ ninfo.csig = csig
+
+ return csig
+
+ #
+ # DECISION SUBSYSTEM
+ #
+
+ def builder_set(self, builder):
+ SCons.Node.Node.builder_set(self, builder)
+ self.changed_since_last_build = self.decide_target
+
+ def changed_content(self, target, prev_ni):
+ cur_csig = self.get_csig()
+ try:
+ return cur_csig != prev_ni.csig
+ except AttributeError:
+ return 1
+
+ def changed_state(self, target, prev_ni):
+ return self.state != SCons.Node.up_to_date
+
+ def changed_timestamp_then_content(self, target, prev_ni):
+ if not self.changed_timestamp_match(target, prev_ni):
+ try:
+ self.get_ninfo().csig = prev_ni.csig
+ except AttributeError:
+ pass
+ return False
+ return self.changed_content(target, prev_ni)
+
+ def changed_timestamp_newer(self, target, prev_ni):
+ try:
+ return self.get_timestamp() > target.get_timestamp()
+ except AttributeError:
+ return 1
+
+ def changed_timestamp_match(self, target, prev_ni):
+ try:
+ return self.get_timestamp() != prev_ni.timestamp
+ except AttributeError:
+ return 1
+
+ def decide_source(self, target, prev_ni):
+ return target.get_build_env().decide_source(self, target, prev_ni)
+
+ def decide_target(self, target, prev_ni):
+ return target.get_build_env().decide_target(self, target, prev_ni)
+
+ # Initialize this Node's decider function to decide_source() because
+ # every file is a source file until it has a Builder attached...
+ changed_since_last_build = decide_source
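+ # Illustrative only (not part of SCons internals defined here): these
+ # decide_* hooks are what the public Decider() mechanism ends up
+ # calling, so in an SConscript something like
+ #
+ # env.Decider('timestamp-match')   # compare via changed_timestamp_match
+ # env.Decider('MD5')               # compare via changed_content (default)
+ #
+ # changes which of the changed_*() methods above gets used.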
+
+ def is_up_to_date(self):
+ T = 0
+ if T: Trace('is_up_to_date(%s):' % self)
+ if not self.exists():
+ if T: Trace(' not self.exists():')
+ # The file doesn't exist locally...
+ r = self.rfile()
+ if r != self:
+ # ...but there is one in a Repository...
+ if not self.changed(r):
+ if T: Trace(' changed(%s):' % r)
+ # ...and it's even up-to-date...
+ if self._local:
+ # ...and they'd like a local copy.
+ e = LocalCopy(self, r, None)
+ if isinstance(e, SCons.Errors.BuildError):
+ raise
+ self.store_info()
+ if T: Trace(' 1\n')
+ return 1
+ self.changed()
+ if T: Trace(' None\n')
+ return None
+ else:
+ r = self.changed()
+ if T: Trace(' self.exists(): %s\n' % r)
+ return not r
+
+ memoizer_counters.append(SCons.Memoize.CountValue('rfile'))
+
+ def rfile(self):
+ try:
+ return self._memo['rfile']
+ except KeyError:
+ pass
+ result = self
+ if not self.exists():
+ norm_name = _my_normcase(self.name)
+ for dir in self.dir.get_all_rdirs():
+ try: node = dir.entries[norm_name]
+ except KeyError: node = dir.file_on_disk(self.name)
+ if node and node.exists() and \
+ (isinstance(node, File) or isinstance(node, Entry) \
+ or not node.is_derived()):
+ result = node
+ break
+ self._memo['rfile'] = result
+ return result
+
+ def rstr(self):
+ return str(self.rfile())
+
+ def get_cachedir_csig(self):
+ """
+ Fetch a Node's content signature for purposes of computing
+ another Node's cachesig.
+
+ This is a wrapper around the normal get_csig() method that handles
+ the somewhat obscure case of using CacheDir with the -n option.
+ Any files that don't exist would normally be "built" by fetching
+ them from the cache, but the normal get_csig() method will try
+ to open up the local file, which doesn't exist because the -n
+ option meant we didn't actually pull the file from cachedir.
+ But since the file *does* actually exist in the cachedir, we
+ can use its contents for the csig.
+ """
+ try:
+ return self.cachedir_csig
+ except AttributeError:
+ pass
+
+ cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
+ if not self.exists() and cachefile and os.path.exists(cachefile):
+ self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \
+ SCons.Node.FS.File.md5_chunksize * 1024)
+ else:
+ self.cachedir_csig = self.get_csig()
+ return self.cachedir_csig
+
+ def get_cachedir_bsig(self):
+ try:
+ return self.cachesig
+ except AttributeError:
+ pass
+
+ # Add the path to the cache signature, because multiple
+ # targets built by the same action will all have the same
+ # build signature, and we have to differentiate them somehow.
+ children = self.children()
+ executor = self.get_executor()
+ # sigs = [n.get_cachedir_csig() for n in children]
+ sigs = map(lambda n: n.get_cachedir_csig(), children)
+ sigs.append(SCons.Util.MD5signature(executor.get_contents()))
+ sigs.append(self.path)
+ result = self.cachesig = SCons.Util.MD5collect(sigs)
+ return result
+
+
+default_fs = None
+
+def get_default_fs():
+ global default_fs
+ if not default_fs:
+ default_fs = FS()
+ return default_fs
+
+class FileFinder:
+ """
+ """
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ memoizer_counters = []
+
+ def __init__(self):
+ self._memo = {}
+
+ def filedir_lookup(self, p, fd=None):
+ """
+ A helper method for find_file() that looks up a directory for
+ a file we're trying to find. This only creates the Dir Node if
+ it exists on-disk, since if the directory doesn't exist we know
+ we won't find any files in it... :-)
+
+ It would be more compact to just use this as a nested function
+ with a default keyword argument (see the commented-out version
+ below), but that doesn't work unless you have nested scopes,
+ so we define it here just so this works under Python 1.5.2.
+ """
+ if fd is None:
+ fd = self.default_filedir
+ dir, name = os.path.split(fd)
+ drive, d = os.path.splitdrive(dir)
+ if d in ('/', os.sep):
+ return p.fs.get_root(drive).dir_on_disk(name)
+ if dir:
+ p = self.filedir_lookup(p, dir)
+ if not p:
+ return None
+ norm_name = _my_normcase(name)
+ try:
+ node = p.entries[norm_name]
+ except KeyError:
+ return p.dir_on_disk(name)
+ if isinstance(node, Dir):
+ return node
+ if isinstance(node, Entry):
+ node.must_be_same(Dir)
+ return node
+ return None
+
+ def _find_file_key(self, filename, paths, verbose=None):
+ return (filename, paths)
+
+ memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key))
+
+ def find_file(self, filename, paths, verbose=None):
+ """
+ find_file(str, [Dir()]) -> node
+
+ filename - a filename to find
+ paths - a list of directory path *nodes* to search in. Can be
+ represented as a list, a tuple, or a callable that is
+ called with no arguments and returns the list or tuple.
+
+ returns - the node created from the found file.
+
+ Find a node corresponding to either a derived file or a file
+ that exists already.
+
+ Only the first file found is returned, and none is returned
+ if no file is found.
+ """
+ memo_key = self._find_file_key(filename, paths)
+ try:
+ memo_dict = self._memo['find_file']
+ except KeyError:
+ memo_dict = {}
+ self._memo['find_file'] = memo_dict
+ else:
+ try:
+ return memo_dict[memo_key]
+ except KeyError:
+ pass
+
+ if verbose and not callable(verbose):
+ if not SCons.Util.is_String(verbose):
+ verbose = "find_file"
+ verbose = ' %s: ' % verbose
+ verbose = lambda s, v=verbose: sys.stdout.write(v + s)
+
+ filedir, filename = os.path.split(filename)
+ if filedir:
+ # More compact code that we can't use until we drop
+ # support for Python 1.5.2:
+ #
+ #def filedir_lookup(p, fd=filedir):
+ # """
+ # A helper function that looks up a directory for a file
+ # we're trying to find. This only creates the Dir Node
+ # if it exists on-disk, since if the directory doesn't
+ # exist we know we won't find any files in it... :-)
+ # """
+ # dir, name = os.path.split(fd)
+ # if dir:
+ # p = filedir_lookup(p, dir)
+ # if not p:
+ # return None
+ # norm_name = _my_normcase(name)
+ # try:
+ # node = p.entries[norm_name]
+ # except KeyError:
+ # return p.dir_on_disk(name)
+ # if isinstance(node, Dir):
+ # return node
+ # if isinstance(node, Entry):
+ # node.must_be_same(Dir)
+ # return node
+ # if isinstance(node, Dir) or isinstance(node, Entry):
+ # return node
+ # return None
+ #paths = filter(None, map(filedir_lookup, paths))
+
+ self.default_filedir = filedir
+ paths = filter(None, map(self.filedir_lookup, paths))
+
+ result = None
+ for dir in paths:
+ if verbose:
+ verbose("looking for '%s' in '%s' ...\n" % (filename, dir))
+ node, d = dir.srcdir_find_file(filename)
+ if node:
+ if verbose:
+ verbose("... FOUND '%s' in '%s'\n" % (filename, d))
+ result = node
+ break
+
+ memo_dict[memo_key] = result
+
+ return result
+
+find_file = FileFinder().find_file
+
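+# NOTE (editor's sketch, not part of upstream SCons): typical use of the
+# module-level find_file() binding above. The directory names here are
+# hypothetical; paths must be Dir nodes (a list, a tuple, or a callable
+# returning them):
+#
+#     fs = get_default_fs()
+#     search_dirs = tuple(map(fs.Dir, ['include', 'src']))
+#     header = find_file('config.h', search_dirs)
+#     if header:
+#         sys.stdout.write(str(header) + '\n')  # first match wins; None if nothing was found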
+
+def invalidate_node_memos(targets):
+ """
+ Invalidate the memoized values of all Nodes (files or directories)
+ that are associated with the given entries. This function was added
+ to clear the cache of nodes affected by a direct execution of an
+ action (e.g. Delete/Copy/Chmod). Existing Node caches become
+ inconsistent if the action is run through Execute(). The argument
+ `targets` can be a single Node object or filename, or a sequence
+ of Nodes/filenames.
+ """
+ from traceback import extract_stack
+
+ # First check if the cache really needs to be flushed. Only
+ # actions run in the SConscript with Execute() seem to be
+ # affected. XXX The way to check if Execute() is in the stacktrace
+ # is a very dirty hack and should be replaced by a more sensible
+ # solution.
+ for f in extract_stack():
+ if f[2] == 'Execute' and f[0][-14:] == 'Environment.py':
+ break
+ else:
+ # Don't have to invalidate, so return
+ return
+
+ if not SCons.Util.is_List(targets):
+ targets = [targets]
+
+ for entry in targets:
+ # If the target is a Node object, clear the cache. If it is a
+ # filename, look up potentially existing Node object first.
+ try:
+ entry.clear_memoized_values()
+ except AttributeError:
+ # Not a Node object, try to look up Node by filename. XXX
+ # This creates Node objects even for those filenames which
+ # do not correspond to an existing Node object.
+ node = get_default_fs().Entry(entry)
+ if node:
+ node.clear_memoized_values()
+
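+# NOTE (editor's sketch, not part of upstream SCons): the situation
+# invalidate_node_memos() guards against. Running an action immediately from
+# an SConscript changes the filesystem behind the Nodes' backs, so their
+# memoized state has to be flushed. Assuming an SConscript context where
+# Execute() and Delete() are in scope:
+#
+#     Execute(Delete('build/stale.o'))
+#
+# The stack check above means the flush only happens when the action really
+# was run via Environment.Execute(); otherwise the call returns immediately.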
diff --git a/tools/scons/scons-local-1.2.0/SCons/Node/Python.py b/tools/scons/scons-local-1.2.0/SCons/Node/Python.py
new file mode 100644
index 000000000..21fbb157c
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Node/Python.py
@@ -0,0 +1,119 @@
+"""scons.Node.Python
+
+Python nodes.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Node/Python.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Node
+
+class ValueNodeInfo(SCons.Node.NodeInfoBase):
+ current_version_id = 1
+
+ field_list = ['csig']
+
+ def str_to_node(self, s):
+ return Value(s)
+
+class ValueBuildInfo(SCons.Node.BuildInfoBase):
+ current_version_id = 1
+
+class Value(SCons.Node.Node):
+ """A class for Python variables, typically passed on the command line
+ or generated by a script, but not from a file or some other source.
+ """
+
+ NodeInfo = ValueNodeInfo
+ BuildInfo = ValueBuildInfo
+
+ def __init__(self, value, built_value=None):
+ SCons.Node.Node.__init__(self)
+ self.value = value
+ if not built_value is None:
+ self.built_value = built_value
+
+ def str_for_display(self):
+ return repr(self.value)
+
+ def __str__(self):
+ return str(self.value)
+
+ def make_ready(self):
+ self.get_csig()
+
+ def build(self, **kw):
+ if not hasattr(self, 'built_value'):
+ apply (SCons.Node.Node.build, (self,), kw)
+
+ is_up_to_date = SCons.Node.Node.children_are_up_to_date
+
+ def is_under(self, dir):
+ # Make Value nodes get built regardless of
+ # what directory scons was run from. Value nodes
+ # are outside the filesystem:
+ return 1
+
+ def write(self, built_value):
+ """Set the value of the node."""
+ self.built_value = built_value
+
+ def read(self):
+ """Return the value. If necessary, the value is built."""
+ self.build()
+ if not hasattr(self, 'built_value'):
+ self.built_value = self.value
+ return self.built_value
+
+ def get_contents(self):
+ """By the assumption that the node.built_value is a
+ deterministic product of the sources, the contents of a Value
+ are the concatenation of all the contents of its sources. As
+ the value need not be built when get_contents() is called, we
+ cannot use the actual node.built_value."""
+ contents = str(self.value)
+ for kid in self.children(None):
+ contents = contents + kid.get_contents()
+ return contents
+
+ def changed_since_last_build(self, target, prev_ni):
+ cur_csig = self.get_csig()
+ try:
+ return cur_csig != prev_ni.csig
+ except AttributeError:
+ return 1
+
+ def get_csig(self, calc=None):
+ """Because we're a Python value node and don't have a real
+ timestamp, we get to ignore the calculator and just use the
+ value contents."""
+ try:
+ return self.ninfo.csig
+ except AttributeError:
+ pass
+ contents = self.get_contents()
+ self.get_ninfo().csig = contents
+ return contents
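+
+# NOTE (editor's sketch, not part of upstream SCons): Value nodes are usually
+# created through env.Value() and used as sources, so that a change in the
+# value itself (rather than in any file) triggers a rebuild. The builder
+# function below is hypothetical:
+#
+#     version = env.Value('1.2.0')
+#     def write_version(target, source, env):
+#         open(str(target[0]), 'w').write(source[0].get_contents())
+#     env.Command('version.h', version, write_version)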
diff --git a/tools/scons/scons-local-1.2.0/SCons/Node/__init__.py b/tools/scons/scons-local-1.2.0/SCons/Node/__init__.py
new file mode 100644
index 000000000..8ea6719e0
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Node/__init__.py
@@ -0,0 +1,1330 @@
+"""SCons.Node
+
+The Node package for the SCons software construction utility.
+
+This is, in many ways, the heart of SCons.
+
+A Node is where we encapsulate all of the dependency information about
+any thing that SCons can build, or about any thing which SCons can use
+to build some other thing. The canonical "thing," of course, is a file,
+but a Node can also represent something remote (like a web page) or
+something completely abstract (like an Alias).
+
+Each specific type of "thing" is specifically represented by a subclass
+of the Node base class: Node.FS.File for files, Node.Alias for aliases,
+etc. Dependency information is kept here in the base class, and
+information specific to files/aliases/etc. is in the subclass. The
+goal, if we've done this correctly, is that any type of "thing" should
+be able to depend on any other type of "thing."
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Node/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+import copy
+from itertools import chain, izip
+import string
+import UserList
+
+from SCons.Debug import logInstanceCreation
+import SCons.Executor
+import SCons.Memoize
+import SCons.Util
+
+from SCons.Debug import Trace
+
+def classname(obj):
+ return string.split(str(obj.__class__), '.')[-1]
+
+# Node states
+#
+# These are in "priority" order, so that the maximum value for any
+# child/dependency of a node represents the state of that node if
+# it has no builder of its own. The canonical example is a file
+# system directory, which is only up to date if all of its children
+# were up to date.
+no_state = 0
+pending = 1
+executing = 2
+up_to_date = 3
+executed = 4
+failed = 5
+
+StateString = {
+ 0 : "no_state",
+ 1 : "pending",
+ 2 : "executing",
+ 3 : "up_to_date",
+ 4 : "executed",
+ 5 : "failed",
+}
+
+# controls whether implicit dependencies are cached:
+implicit_cache = 0
+
+# controls whether implicit dep changes are ignored:
+implicit_deps_unchanged = 0
+
+# controls whether the cached implicit deps are ignored:
+implicit_deps_changed = 0
+
+# A variable that can be set to an interface-specific function to be called
+# to annotate a Node with information about its creation.
+def do_nothing(node): pass
+
+Annotate = do_nothing
+
+# Classes for signature info for Nodes.
+
+class NodeInfoBase:
+ """
+ The generic base class for signature information for a Node.
+
+ Node subclasses should subclass NodeInfoBase to provide their own
+ logic for dealing with their own Node-specific signature information.
+ """
+ current_version_id = 1
+ def __init__(self, node):
+ # Create an object attribute from the class attribute so it ends up
+ # in the pickled data in the .sconsign file.
+ self._version_id = self.current_version_id
+ def update(self, node):
+ try:
+ field_list = self.field_list
+ except AttributeError:
+ return
+ for f in field_list:
+ try:
+ delattr(self, f)
+ except AttributeError:
+ pass
+ try:
+ func = getattr(node, 'get_' + f)
+ except AttributeError:
+ pass
+ else:
+ setattr(self, f, func())
+ def convert(self, node, val):
+ pass
+ def merge(self, other):
+ self.__dict__.update(other.__dict__)
+ def format(self, field_list=None, names=0):
+ if field_list is None:
+ try:
+ field_list = self.field_list
+ except AttributeError:
+ field_list = self.__dict__.keys()
+ field_list.sort()
+ fields = []
+ for field in field_list:
+ try:
+ f = getattr(self, field)
+ except AttributeError:
+ f = None
+ f = str(f)
+ if names:
+ f = field + ': ' + f
+ fields.append(f)
+ return fields
+
+class BuildInfoBase:
+ """
+ The generic base class for build information for a Node.
+
+ This is what gets stored in a .sconsign file for each target file.
+ It contains a NodeInfo instance for this node (signature information
+ that's specific to the type of Node) and direct attributes for the
+ generic build stuff we have to track: sources, explicit dependencies,
+ implicit dependencies, and action information.
+ """
+ current_version_id = 1
+ def __init__(self, node):
+ # Create an object attribute from the class attribute so it ends up
+ # in the pickled data in the .sconsign file.
+ self._version_id = self.current_version_id
+ self.bsourcesigs = []
+ self.bdependsigs = []
+ self.bimplicitsigs = []
+ self.bactsig = None
+ def merge(self, other):
+ self.__dict__.update(other.__dict__)
+
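+# NOTE (editor's sketch, not part of upstream SCons): how a Node subclass
+# typically hooks into the two classes above. field_list drives
+# NodeInfoBase.update(), which copies node.get_<field>() into the info object
+# that ends up pickled into the .sconsign file (compare ValueNodeInfo in
+# SCons/Node/Python.py):
+#
+#     class MyNodeInfo(NodeInfoBase):
+#         current_version_id = 1
+#         field_list = ['csig']      # update() will call node.get_csig()
+#
+#     class MyBuildInfo(BuildInfoBase):
+#         current_version_id = 1
+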
+class Node:
+ """The base Node class, for entities that we know how to
+ build, or use to build other Nodes.
+ """
+
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ memoizer_counters = []
+
+ class Attrs:
+ pass
+
+ def __init__(self):
+ if __debug__: logInstanceCreation(self, 'Node.Node')
+ # Note that we no longer explicitly initialize a self.builder
+ # attribute to None here. That's because the self.builder
+ # attribute may be created on-the-fly later by a subclass (the
+ # canonical example being a builder to fetch a file from a
+ # source code system like CVS or Subversion).
+
+ # Each list of children that we maintain is accompanied by a
+ # dictionary used to look up quickly whether a node is already
+ # present in the list. Empirical tests showed that it was
+ # fastest to maintain them as side-by-side Node attributes in
+ # this way, instead of wrapping up each list+dictionary pair in
+ # a class. (Of course, we could always still do that in the
+ # future if we had a good reason to...).
+ self.sources = [] # source files used to build node
+ self.sources_set = set()
+ self._specific_sources = False
+ self.depends = [] # explicit dependencies (from Depends)
+ self.depends_set = set()
+ self.ignore = [] # dependencies to ignore
+ self.ignore_set = set()
+ self.prerequisites = SCons.Util.UniqueList()
+ self.implicit = None # implicit (scanned) dependencies (None means not scanned yet)
+ self.waiting_parents = set()
+ self.waiting_s_e = set()
+ self.ref_count = 0
+ self.wkids = None # Kids yet to walk, when it's an array
+
+ self.env = None
+ self.state = no_state
+ self.precious = None
+ self.noclean = 0
+ self.nocache = 0
+ self.always_build = None
+ self.includes = None
+ self.attributes = self.Attrs() # Generic place to stick information about the Node.
+ self.side_effect = 0 # true iff this node is a side effect
+ self.side_effects = [] # the side effects of building this target
+ self.linked = 0 # is this node linked to the variant directory?
+
+ self.clear_memoized_values()
+
+ # Let the interface in which the build engine is embedded
+ # annotate this Node with its own info (like a description of
+ # what line in what file created the node, for example).
+ Annotate(self)
+
+ def disambiguate(self, must_exist=None):
+ return self
+
+ def get_suffix(self):
+ return ''
+
+ memoizer_counters.append(SCons.Memoize.CountValue('get_build_env'))
+
+ def get_build_env(self):
+ """Fetch the appropriate Environment to build this node.
+ """
+ try:
+ return self._memo['get_build_env']
+ except KeyError:
+ pass
+ result = self.get_executor().get_build_env()
+ self._memo['get_build_env'] = result
+ return result
+
+ def get_build_scanner_path(self, scanner):
+ """Fetch the appropriate scanner path for this node."""
+ return self.get_executor().get_build_scanner_path(scanner)
+
+ def set_executor(self, executor):
+ """Set the action executor for this node."""
+ self.executor = executor
+
+ def get_executor(self, create=1):
+ """Fetch the action executor for this node. Create one if
+ there isn't already one and we've been asked to do so."""
+ try:
+ executor = self.executor
+ except AttributeError:
+ if not create:
+ raise
+ try:
+ act = self.builder.action
+ except AttributeError:
+ executor = SCons.Executor.Null(targets=[self])
+ else:
+ executor = SCons.Executor.Executor(act,
+ self.env or self.builder.env,
+ [self.builder.overrides],
+ [self],
+ self.sources)
+ self.executor = executor
+ return executor
+
+ def executor_cleanup(self):
+ """Let the executor clean up any cached information."""
+ try:
+ executor = self.get_executor(create=None)
+ except AttributeError:
+ pass
+ else:
+ executor.cleanup()
+
+ def reset_executor(self):
+ "Remove cached executor; forces recompute when needed."
+ try:
+ delattr(self, 'executor')
+ except AttributeError:
+ pass
+
+ def retrieve_from_cache(self):
+ """Try to retrieve the node's content from a cache
+
+ This method is called from multiple threads in a parallel build,
+ so only do thread safe stuff here. Do thread unsafe stuff in
+ built().
+
+ Returns true iff the node was successfully retrieved.
+ """
+ return 0
+
+ #
+ # Taskmaster interface subsystem
+ #
+
+ def make_ready(self):
+ """Get a Node ready for evaluation.
+
+ This is called before the Taskmaster decides if the Node is
+ up-to-date or not. Overriding this method allows for a Node
+ subclass to be disambiguated if necessary, or for an implicit
+ source builder to be attached.
+ """
+ pass
+
+ def prepare(self):
+ """Prepare for this Node to be built.
+
+ This is called after the Taskmaster has decided that the Node
+ is out-of-date and must be rebuilt, but before actually calling
+ the method to build the Node.
+
+ This default implementation checks that explicit or implicit
+ dependencies either exist or are derived, and initializes the
+ BuildInfo structure that will hold the information about how
+ this node is, uh, built.
+
+ (The existence of source files is checked separately by the
+ Executor, which aggregates checks for all of the targets built
+ by a specific action.)
+
+ Overriding this method allows a Node subclass to remove
+ the underlying file from the file system. Note that subclass
+ methods should call this base class method to get the child
+ check and the BuildInfo structure.
+ """
+ for d in self.depends:
+ if d.missing():
+ msg = "Explicit dependency `%s' not found, needed by target `%s'."
+ raise SCons.Errors.StopError, msg % (d, self)
+ if not self.implicit is None:
+ for i in self.implicit:
+ if i.missing():
+ msg = "Implicit dependency `%s' not found, needed by target `%s'."
+ raise SCons.Errors.StopError, msg % (i, self)
+ self.binfo = self.get_binfo()
+
+ def build(self, **kw):
+ """Actually build the node.
+
+ This is called by the Taskmaster after it's decided that the
+ Node is out-of-date and must be rebuilt, and after the prepare()
+ method has gotten everything, uh, prepared.
+
+ This method is called from multiple threads in a parallel build,
+ so only do thread safe stuff here. Do thread unsafe stuff
+ in built().
+
+ """
+ try:
+ apply(self.get_executor(), (self,), kw)
+ except SCons.Errors.BuildError, e:
+ e.node = self
+ raise
+
+ def built(self):
+ """Called just after this node is successfully built."""
+
+ # Clear the implicit dependency caches of any Nodes
+ # waiting for this Node to be built.
+ for parent in self.waiting_parents:
+ parent.implicit = None
+
+ self.clear()
+
+ self.ninfo.update(self)
+
+ def visited(self):
+ """Called just after this node has been visited (with or
+ without a build)."""
+ try:
+ binfo = self.binfo
+ except AttributeError:
+ # Apparently this node doesn't need build info, so
+ # don't bother calculating or storing it.
+ pass
+ else:
+ self.ninfo.update(self)
+ self.store_info()
+
+ #
+ #
+ #
+
+ def add_to_waiting_s_e(self, node):
+ self.waiting_s_e.add(node)
+
+ def add_to_waiting_parents(self, node):
+ """
+ Returns the number of nodes added to our waiting parents list:
+ 1 if we add a unique waiting parent, 0 if not. (Note that the
+ returned values are intended to be used to increment a reference
+ count, so don't think you can "clean up" this function by using
+ True and False instead...)
+ """
+ wp = self.waiting_parents
+ if node in wp:
+ return 0
+ wp.add(node)
+ return 1
+
+ def postprocess(self):
+ """Clean up anything we don't need to hang onto after we've
+ been built."""
+ self.executor_cleanup()
+ self.waiting_parents = set()
+
+ def clear(self):
+ """Completely clear a Node of all its cached state (so that it
+ can be re-evaluated by interfaces that do continuous integration
+ builds).
+ """
+ # The del_binfo() call here isn't necessary for normal execution,
+ # but is for interactive mode, where we might rebuild the same
+ # target and need to start from scratch.
+ self.del_binfo()
+ self.clear_memoized_values()
+ self.ninfo = self.new_ninfo()
+ self.executor_cleanup()
+ try:
+ delattr(self, '_calculated_sig')
+ except AttributeError:
+ pass
+ self.includes = None
+
+ def clear_memoized_values(self):
+ self._memo = {}
+
+ def builder_set(self, builder):
+ self.builder = builder
+ try:
+ del self.executor
+ except AttributeError:
+ pass
+
+ def has_builder(self):
+ """Return whether this Node has a builder or not.
+
+ In Boolean tests, this turns out to be a *lot* more efficient
+ than simply examining the builder attribute directly ("if
+ node.builder: ..."). When the builder attribute is examined
+ directly, it ends up calling __getattr__ for both the __len__
+ and __nonzero__ attributes on instances of our Builder Proxy
+ class(es), generating a bazillion extra calls and slowing
+ things down immensely.
+ """
+ try:
+ b = self.builder
+ except AttributeError:
+ # There was no explicit builder for this Node, so initialize
+ # the self.builder attribute to None now.
+ b = self.builder = None
+ return not b is None
+
+ def set_explicit(self, is_explicit):
+ self.is_explicit = is_explicit
+
+ def has_explicit_builder(self):
+ """Return whether this Node has an explicit builder
+
+ This allows an internal Builder created by SCons to be marked
+ non-explicit, so that it can be overridden by an explicit
+ builder that the user supplies (the canonical example being
+ directories)."""
+ try:
+ return self.is_explicit
+ except AttributeError:
+ self.is_explicit = None
+ return self.is_explicit
+
+ def get_builder(self, default_builder=None):
+ """Return the set builder, or a specified default value"""
+ try:
+ return self.builder
+ except AttributeError:
+ return default_builder
+
+ multiple_side_effect_has_builder = has_builder
+
+ def is_derived(self):
+ """
+ Returns true iff this node is derived (i.e. built).
+
+ This should return true only for nodes whose path should be in
+ the variant directory when duplicate=0 and should contribute their build
+ signatures when they are used as source files to other derived files. For
+ example: sources with source builders are not derived in this sense,
+ and hence should not return true.
+ """
+ return self.has_builder() or self.side_effect
+
+ def alter_targets(self):
+ """Return a list of alternate targets for this Node.
+ """
+ return [], None
+
+ def get_found_includes(self, env, scanner, path):
+ """Return the scanned include lines (implicit dependencies)
+ found in this node.
+
+ The default is no implicit dependencies. We expect this method
+ to be overridden by any subclass that can be scanned for
+ implicit dependencies.
+ """
+ return []
+
+ def get_implicit_deps(self, env, scanner, path):
+ """Return a list of implicit dependencies for this node.
+
+ This method exists to handle recursive invocation of the scanner
+ on the implicit dependencies returned by the scanner, if the
+ scanner's recursive flag says that we should.
+ """
+ if not scanner:
+ return []
+
+ # Give the scanner a chance to select a more specific scanner
+ # for this Node.
+ #scanner = scanner.select(self)
+
+ nodes = [self]
+ seen = {}
+ seen[self] = 1
+ deps = []
+ while nodes:
+ n = nodes.pop(0)
+ d = filter(lambda x, seen=seen: not seen.has_key(x),
+ n.get_found_includes(env, scanner, path))
+ if d:
+ deps.extend(d)
+ for n in d:
+ seen[n] = 1
+ nodes.extend(scanner.recurse_nodes(d))
+
+ return deps
+
+ def get_env_scanner(self, env, kw={}):
+ return env.get_scanner(self.scanner_key())
+
+ def get_target_scanner(self):
+ return self.builder.target_scanner
+
+ def get_source_scanner(self, node):
+ """Fetch the source scanner for the specified node
+
+ NOTE: "self" is the target being built, "node" is
+ the source file for which we want to fetch the scanner.
+
+ Implies self.has_builder() is true; again, expect to only be
+ called from locations where this is already verified.
+
+ This function may be called very often; it attempts to cache
+ the scanner found to improve performance.
+ """
+ scanner = None
+ try:
+ scanner = self.builder.source_scanner
+ except AttributeError:
+ pass
+ if not scanner:
+ # The builder didn't have an explicit scanner, so go look up
+ # a scanner from env['SCANNERS'] based on the node's scanner
+ # key (usually the file extension).
+ scanner = self.get_env_scanner(self.get_build_env())
+ if scanner:
+ scanner = scanner.select(node)
+ return scanner
+
+ def add_to_implicit(self, deps):
+ if not hasattr(self, 'implicit') or self.implicit is None:
+ self.implicit = []
+ self.implicit_set = set()
+ self._children_reset()
+ self._add_child(self.implicit, self.implicit_set, deps)
+
+ def scan(self):
+ """Scan this node's dependents for implicit dependencies."""
+ # Don't bother scanning non-derived files, because we don't
+ # care what their dependencies are.
+ # Don't scan again, if we already have scanned.
+ if not self.implicit is None:
+ return
+ self.implicit = []
+ self.implicit_set = set()
+ self._children_reset()
+ if not self.has_builder():
+ return
+
+ build_env = self.get_build_env()
+ executor = self.get_executor()
+
+ # Here's where we implement --implicit-cache.
+ if implicit_cache and not implicit_deps_changed:
+ implicit = self.get_stored_implicit()
+ if implicit is not None:
+ # We now assume the implicit dependencies returned from the
+ # stored .sconsign entry have already been converted
+ # to Nodes for us.  (We used to run them through a
+ # source_factory function here.)
+
+ # Update all of the targets with them. This
+ # essentially short-circuits an N*M scan of the
+ # sources for each individual target, which is a hell
+ # of a lot more efficient.
+ for tgt in executor.targets:
+ tgt.add_to_implicit(implicit)
+
+ if implicit_deps_unchanged or self.is_up_to_date():
+ return
+ # one of this node's sources has changed,
+ # so we must recalculate the implicit deps:
+ self.implicit = []
+ self.implicit_set = set()
+
+ # Have the executor scan the sources.
+ executor.scan_sources(self.builder.source_scanner)
+
+ # If there's a target scanner, have the executor scan the target
+ # node itself and associated targets that might be built.
+ scanner = self.get_target_scanner()
+ if scanner:
+ executor.scan_targets(scanner)
+
+ def scanner_key(self):
+ return None
+
+ def select_scanner(self, scanner):
+ """Selects a scanner for this Node.
+
+ This is a separate method so it can be overridden by Node
+ subclasses (specifically, Node.FS.Dir) that *must* use their
+ own Scanner and don't select one from the Scanner.Selector that's
+ configured for the target.
+ """
+ return scanner.select(self)
+
+ def env_set(self, env, safe=0):
+ if safe and self.env:
+ return
+ self.env = env
+
+ #
+ # SIGNATURE SUBSYSTEM
+ #
+
+ NodeInfo = NodeInfoBase
+ BuildInfo = BuildInfoBase
+
+ def new_ninfo(self):
+ ninfo = self.NodeInfo(self)
+ return ninfo
+
+ def get_ninfo(self):
+ try:
+ return self.ninfo
+ except AttributeError:
+ self.ninfo = self.new_ninfo()
+ return self.ninfo
+
+ def new_binfo(self):
+ binfo = self.BuildInfo(self)
+ return binfo
+
+ def get_binfo(self):
+ """
+ Fetch a node's build information.
+
+ returns - the BuildInfo object for this node, cached after the first call
+
+ This no longer handles the recursive descent of the
+ node's children's signatures. We expect that they're
+ already built and updated by someone else, if that's
+ what's wanted.
+ """
+ try:
+ return self.binfo
+ except AttributeError:
+ pass
+
+ binfo = self.new_binfo()
+ self.binfo = binfo
+
+ executor = self.get_executor()
+ ignore_set = self.ignore_set
+
+ if self.has_builder():
+ binfo.bact = str(executor)
+ binfo.bactsig = SCons.Util.MD5signature(executor.get_contents())
+
+ if self._specific_sources:
+ sources = []
+ for s in self.sources:
+ if s not in ignore_set:
+ sources.append(s)
+ else:
+ sources = executor.get_unignored_sources(self.ignore)
+ seen = set()
+ bsources = []
+ bsourcesigs = []
+ for s in sources:
+ if not s in seen:
+ seen.add(s)
+ bsources.append(s)
+ bsourcesigs.append(s.get_ninfo())
+ binfo.bsources = bsources
+ binfo.bsourcesigs = bsourcesigs
+
+ depends = self.depends
+ dependsigs = []
+ for d in depends:
+ if d not in ignore_set:
+ dependsigs.append(d.get_ninfo())
+ binfo.bdepends = depends
+ binfo.bdependsigs = dependsigs
+
+ implicit = self.implicit or []
+ implicitsigs = []
+ for i in implicit:
+ if i not in ignore_set:
+ implicitsigs.append(i.get_ninfo())
+ binfo.bimplicit = implicit
+ binfo.bimplicitsigs = implicitsigs
+
+ return binfo
+
+ def del_binfo(self):
+ """Delete the build info from this node."""
+ try:
+ delattr(self, 'binfo')
+ except AttributeError:
+ pass
+
+ def get_csig(self):
+ try:
+ return self.ninfo.csig
+ except AttributeError:
+ ninfo = self.get_ninfo()
+ ninfo.csig = SCons.Util.MD5signature(self.get_contents())
+ return self.ninfo.csig
+
+ def get_cachedir_csig(self):
+ return self.get_csig()
+
+ def store_info(self):
+ """Make the build signature permanent (that is, store it in the
+ .sconsign file or equivalent)."""
+ pass
+
+ def do_not_store_info(self):
+ pass
+
+ def get_stored_info(self):
+ return None
+
+ def get_stored_implicit(self):
+ """Fetch the stored implicit dependencies"""
+ return None
+
+ #
+ #
+ #
+
+ def set_precious(self, precious = 1):
+ """Set the Node's precious value."""
+ self.precious = precious
+
+ def set_noclean(self, noclean = 1):
+ """Set the Node's noclean value."""
+ # Make sure noclean is an integer so the --debug=stree
+ # output in Util.py can use it as an index.
+ self.noclean = noclean and 1 or 0
+
+ def set_nocache(self, nocache = 1):
+ """Set the Node's nocache value."""
+ # Make sure nocache is an integer so the --debug=stree
+ # output in Util.py can use it as an index.
+ self.nocache = nocache and 1 or 0
+
+ def set_always_build(self, always_build = 1):
+ """Set the Node's always_build value."""
+ self.always_build = always_build
+
+ def exists(self):
+ """Does this node exists?"""
+ # All node exist by default:
+ return 1
+
+ def rexists(self):
+ """Does this node exist locally or in a repositiory?"""
+ # There are no repositories by default:
+ return self.exists()
+
+ def missing(self):
+ return not self.is_derived() and \
+ not self.linked and \
+ not self.rexists()
+
+ def remove(self):
+ """Remove this Node: no-op by default."""
+ return None
+
+ def add_dependency(self, depend):
+ """Adds dependencies."""
+ try:
+ self._add_child(self.depends, self.depends_set, depend)
+ except TypeError, e:
+ e = e.args[0]
+ if SCons.Util.is_List(e):
+ s = map(str, e)
+ else:
+ s = str(e)
+ raise SCons.Errors.UserError("attempted to add a non-Node dependency to %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
+
+ def add_prerequisite(self, prerequisite):
+ """Adds prerequisites"""
+ self.prerequisites.extend(prerequisite)
+ self._children_reset()
+
+ def add_ignore(self, depend):
+ """Adds dependencies to ignore."""
+ try:
+ self._add_child(self.ignore, self.ignore_set, depend)
+ except TypeError, e:
+ e = e.args[0]
+ if SCons.Util.is_List(e):
+ s = map(str, e)
+ else:
+ s = str(e)
+ raise SCons.Errors.UserError("attempted to ignore a non-Node dependency of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
+
+ def add_source(self, source):
+ """Adds sources."""
+ if self._specific_sources:
+ return
+ try:
+ self._add_child(self.sources, self.sources_set, source)
+ except TypeError, e:
+ e = e.args[0]
+ if SCons.Util.is_List(e):
+ s = map(str, e)
+ else:
+ s = str(e)
+ raise SCons.Errors.UserError("attempted to add a non-Node as source of %s:\n\t%s is a %s, not a Node" % (str(self), s, type(e)))
+
+ def _add_child(self, collection, set, child):
+ """Adds 'child' to 'collection', first checking 'set' to see if it's
+ already present."""
+ #if type(child) is not type([]):
+ # child = [child]
+ #for c in child:
+ # if not isinstance(c, Node):
+ # raise TypeError, c
+ added = None
+ for c in child:
+ if c not in set:
+ set.add(c)
+ collection.append(c)
+ added = 1
+ if added:
+ self._children_reset()
+
+ def set_specific_source(self, source):
+ self.add_source(source)
+ self._specific_sources = True
+
+ def add_wkid(self, wkid):
+ """Add a node to the list of kids waiting to be evaluated"""
+ if self.wkids != None:
+ self.wkids.append(wkid)
+
+ def _children_reset(self):
+ self.clear_memoized_values()
+ # We need to let the Executor clear out any calculated
+ # build info that it's cached so we can re-calculate it.
+ self.executor_cleanup()
+
+ memoizer_counters.append(SCons.Memoize.CountValue('_children_get'))
+
+ def _children_get(self):
+ try:
+ return self._memo['children_get']
+ except KeyError:
+ pass
+
+ # The return list may contain duplicate Nodes, especially in
+ # source trees where there are a lot of repeated #includes
+ # of a tangle of .h files. Profiling shows, however, that
+ # eliminating the duplicates with a brute-force approach that
+ # preserves the order (that is, something like:
+ #
+ # u = []
+ # for n in list:
+ # if n not in u:
+ # u.append(n)"
+ #
+ # takes more cycles than just letting the underlying methods
+ # hand back cached values if a Node's information is requested
+ # multiple times. (Other methods of removing duplicates, like
+ # using dictionary keys, lose the order, and the only ordered
+ # dictionary patterns I found all ended up using "not in"
+ # internally anyway...)
+ if self.ignore_set:
+ if self.implicit is None:
+ iter = chain(self.sources,self.depends)
+ else:
+ iter = chain(self.sources, self.depends, self.implicit)
+
+ children = []
+ for i in iter:
+ if i not in self.ignore_set:
+ children.append(i)
+ else:
+ if self.implicit is None:
+ children = self.sources + self.depends
+ else:
+ children = self.sources + self.depends + self.implicit
+
+ self._memo['children_get'] = children
+ return children
+
+ def all_children(self, scan=1):
+ """Return a list of all the node's direct children."""
+ if scan:
+ self.scan()
+
+ # The return list may contain duplicate Nodes, especially in
+ # source trees where there are a lot of repeated #includes
+ # of a tangle of .h files. Profiling shows, however, that
+ # eliminating the duplicates with a brute-force approach that
+ # preserves the order (that is, something like:
+ #
+ # u = []
+ # for n in list:
+ # if n not in u:
+ # u.append(n)"
+ #
+ # takes more cycles than just letting the underlying methods
+ # hand back cached values if a Node's information is requested
+ # multiple times. (Other methods of removing duplicates, like
+ # using dictionary keys, lose the order, and the only ordered
+ # dictionary patterns I found all ended up using "not in"
+ # internally anyway...)
+ if self.implicit is None:
+ return self.sources + self.depends
+ else:
+ return self.sources + self.depends + self.implicit
+
+ def children(self, scan=1):
+ """Return a list of the node's direct children, minus those
+ that are ignored by this node."""
+ if scan:
+ self.scan()
+ return self._children_get()
+
+ def set_state(self, state):
+ self.state = state
+
+ def get_state(self):
+ return self.state
+
+ def state_has_changed(self, target, prev_ni):
+ return (self.state != SCons.Node.up_to_date)
+
+ def get_env(self):
+ env = self.env
+ if not env:
+ import SCons.Defaults
+ env = SCons.Defaults.DefaultEnvironment()
+ return env
+
+ def changed_since_last_build(self, target, prev_ni):
+ """
+
+ Must be overridden in a specific subclass to return True if this
+ Node (a dependency) has changed since the last time it was used
+ to build the specified target. prev_ni is this Node's state (for
+ example, its file timestamp, length, maybe content signature)
+ as of the last time the target was built.
+
+ Note that this method is called through the dependency, not the
+ target, because a dependency Node must be able to use its own
+ logic to decide if it changed. For example, File Nodes need to
+ obey if we're configured to use timestamps, but Python Value Nodes
+ never use timestamps and always use the content. If this method
+ were called through the target, then each Node's implementation
+ of this method would have to have more complicated logic to
+ handle all the different Node types on which it might depend.
+ """
+ raise NotImplementedError
+
+ def Decider(self, function):
+ SCons.Util.AddMethod(self, function, 'changed_since_last_build')
+
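+ # NOTE (editor's sketch, not part of upstream SCons): a custom decider has
+ # the same signature as changed_since_last_build(). It can be attached to a
+ # single node via Decider() above, or (more commonly) environment-wide via
+ # Environment.Decider(); 'env' below is an assumed construction Environment:
+ #
+ #     def content_decider(dependency, target, prev_ni):
+ #         try:
+ #             return dependency.get_csig() != prev_ni.csig
+ #         except AttributeError:
+ #             return 1    # no stored signature yet, so treat it as changed
+ #
+ #     env.Decider(content_decider)
+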
+ def changed(self, node=None):
+ """
+ Returns whether the node has changed with respect to the BuildInfo
+ stored the last time it was built. The default behavior is to compare
+ it against our own previously stored BuildInfo, but the stored
+ BuildInfo from another Node (typically one in a Repository)
+ can be used instead.
+
+ Note that we now *always* check every dependency. We used to
+ short-circuit the check by returning as soon as we detected
+ any difference, but we now rely on checking every dependency
+ to make sure that any necessary Node information (for example,
+ the content signature of an #included .h file) is updated.
+ """
+ t = 0
+ if t: Trace('changed(%s [%s], %s)' % (self, classname(self), node))
+ if node is None:
+ node = self
+
+ result = False
+
+ bi = node.get_stored_info().binfo
+ then = bi.bsourcesigs + bi.bdependsigs + bi.bimplicitsigs
+ children = self.children()
+
+ diff = len(children) - len(then)
+ if diff:
+ # The old and new dependency lists are different lengths.
+ # This always indicates that the Node must be rebuilt.
+ # We also extend the old dependency list with enough None
+ # entries to equal the new dependency list, for the benefit
+ # of the loop below that updates node information.
+ then.extend([None] * diff)
+ if t: Trace(': old %s new %s' % (len(then), len(children)))
+ result = True
+
+ for child, prev_ni in izip(children, then):
+ if child.changed_since_last_build(self, prev_ni):
+ if t: Trace(': %s changed' % child)
+ result = True
+
+ contents = self.get_executor().get_contents()
+ if self.has_builder():
+ import SCons.Util
+ newsig = SCons.Util.MD5signature(contents)
+ if bi.bactsig != newsig:
+ if t: Trace(': bactsig %s != newsig %s' % (bi.bactsig, newsig))
+ result = True
+
+ if not result:
+ if t: Trace(': up to date')
+
+ if t: Trace('\n')
+
+ return result
+
+ def is_up_to_date(self):
+ """Default check for whether the Node is current: unknown Node
+ subtypes are always out of date, so they will always get built."""
+ return None
+
+ def children_are_up_to_date(self):
+ """Alternate check for whether the Node is current: If all of
+ our children were up-to-date, then this Node was up-to-date, too.
+
+ The SCons.Node.Alias and SCons.Node.Python.Value subclasses
+ rebind their is_up_to_date() method to this method."""
+ # Allow the children to calculate their signatures.
+ self.binfo = self.get_binfo()
+ if self.always_build:
+ return None
+ state = 0
+ for kid in self.children(None):
+ s = kid.get_state()
+ if s and (not state or s > state):
+ state = s
+ return (state == 0 or state == SCons.Node.up_to_date)
+
+ def is_literal(self):
+ """Always pass the string representation of a Node to
+ the command interpreter literally."""
+ return 1
+
+ def render_include_tree(self):
+ """
+ Return a text representation, suitable for displaying to the
+ user, of the include tree for the sources of this node.
+ """
+ if self.is_derived() and self.env:
+ env = self.get_build_env()
+ for s in self.sources:
+ scanner = self.get_source_scanner(s)
+ if scanner:
+ path = self.get_build_scanner_path(scanner)
+ else:
+ path = None
+ def f(node, env=env, scanner=scanner, path=path):
+ return node.get_found_includes(env, scanner, path)
+ return SCons.Util.render_tree(s, f, 1)
+ else:
+ return None
+
+ def get_abspath(self):
+ """
+ Return an absolute path to the Node. This will return simply
+ str(Node) by default, but for Node types that have a concept of
+ relative path, this might return something different.
+ """
+ return str(self)
+
+ def for_signature(self):
+ """
+ Return a string representation of the Node that will always
+ be the same for this particular Node, no matter what. This
+ is by contrast to the __str__() method, which might, for
+ instance, return a relative path for a file Node. The purpose
+ of this method is to generate a value to be used in signature
+ calculation for the command line used to build a target, and
+ we use this method instead of str() to avoid unnecessary
+ rebuilds. This method does not need to return something that
+ would actually work in a command line; it can return any kind of
+ nonsense, so long as it does not change.
+ """
+ return str(self)
+
+ def get_string(self, for_signature):
+ """This is a convenience function designed primarily to be
+ used in command generators (i.e., CommandGeneratorActions or
+ Environment variables that are callable), which are called
+ with a for_signature argument that is nonzero if the command
+ generator is being called to generate a signature for the
+ command line, which determines if we should rebuild or not.
+
+ Such command generators should use this method in preference
+ to str(Node) when converting a Node to a string, passing
+ in the for_signature parameter, such that we will call
+ Node.for_signature() or str(Node) properly, depending on whether
+ we are calculating a signature or actually constructing a
+ command line."""
+ if for_signature:
+ return self.for_signature()
+ return str(self)
+
+ def get_subst_proxy(self):
+ """
+ This method is expected to return an object that will function
+ exactly like this Node, except that it implements any additional
+ special features that we would like to be in effect for
+ Environment variable substitution. The principal use is that
+ some Nodes would like to implement a __getattr__() method,
+ but putting that in the Node type itself has a tendency to kill
+ performance. We instead put it in a proxy and return it from
+ this method. It is legal for this method to return self
+ if no new functionality is needed for Environment substitution.
+ """
+ return self
+
+ def explain(self):
+ if not self.exists():
+ return "building `%s' because it doesn't exist\n" % self
+
+ if self.always_build:
+ return "rebuilding `%s' because AlwaysBuild() is specified\n" % self
+
+ old = self.get_stored_info()
+ if old is None:
+ return None
+
+ old = old.binfo
+ old.prepare_dependencies()
+
+ try:
+ old_bkids = old.bsources + old.bdepends + old.bimplicit
+ old_bkidsigs = old.bsourcesigs + old.bdependsigs + old.bimplicitsigs
+ except AttributeError:
+ return "Cannot explain why `%s' is being rebuilt: No previous build information found\n" % self
+
+ new = self.get_binfo()
+
+ new_bkids = new.bsources + new.bdepends + new.bimplicit
+ new_bkidsigs = new.bsourcesigs + new.bdependsigs + new.bimplicitsigs
+
+ osig = dict(izip(old_bkids, old_bkidsigs))
+ nsig = dict(izip(new_bkids, new_bkidsigs))
+
+ # The sources and dependencies we'll want to report are all stored
+ # as relative paths to this target's directory, but we want to
+ # report them relative to the top-level SConstruct directory,
+ # so we only print them after running them through this lambda
+ # to turn them into the right relative Node and then return
+ # its string.
+ def stringify( s, E=self.dir.Entry ) :
+ if hasattr( s, 'dir' ) :
+ return str(E(s))
+ return str(s)
+
+ lines = []
+
+ removed = filter(lambda x, nk=new_bkids: not x in nk, old_bkids)
+ if removed:
+ removed = map(stringify, removed)
+ fmt = "`%s' is no longer a dependency\n"
+ lines.extend(map(lambda s, fmt=fmt: fmt % s, removed))
+
+ for k in new_bkids:
+ if not k in old_bkids:
+ lines.append("`%s' is a new dependency\n" % stringify(k))
+ elif k.changed_since_last_build(self, osig[k]):
+ lines.append("`%s' changed\n" % stringify(k))
+
+ if len(lines) == 0 and old_bkids != new_bkids:
+ lines.append("the dependency order changed:\n" +
+ "%sold: %s\n" % (' '*15, map(stringify, old_bkids)) +
+ "%snew: %s\n" % (' '*15, map(stringify, new_bkids)))
+
+ if len(lines) == 0:
+ def fmt_with_title(title, strlines):
+ lines = string.split(strlines, '\n')
+ sep = '\n' + ' '*(15 + len(title))
+ return ' '*15 + title + string.join(lines, sep) + '\n'
+ if old.bactsig != new.bactsig:
+ if old.bact == new.bact:
+ lines.append("the contents of the build action changed\n" +
+ fmt_with_title('action: ', new.bact))
+ else:
+ lines.append("the build action changed:\n" +
+ fmt_with_title('old: ', old.bact) +
+ fmt_with_title('new: ', new.bact))
+
+ if len(lines) == 0:
+ return "rebuilding `%s' for unknown reasons\n" % self
+
+ preamble = "rebuilding `%s' because" % self
+ if len(lines) == 1:
+ return "%s %s" % (preamble, lines[0])
+ else:
+ lines = ["%s:\n" % preamble] + lines
+ return string.join(lines, ' '*11)
+
+try:
+ [].extend(UserList.UserList([]))
+except TypeError:
+ # Python 1.5.2 doesn't allow a list to be extended by list-like
+ # objects (such as UserList instances), so just punt and use
+ # real lists.
+ def NodeList(l):
+ return l
+else:
+ class NodeList(UserList.UserList):
+ def __str__(self):
+ return str(map(str, self.data))
+
+def get_children(node, parent): return node.children()
+def ignore_cycle(node, stack): pass
+def do_nothing(node, parent): pass
+
+class Walker:
+ """An iterator for walking a Node tree.
+
+ This is depth-first: children are visited before the parent.
+ The Walker object can be initialized with any node, and
+ returns the next node on the descent with each next() call.
+ 'kids_func' is an optional function that will be called to
+ get the children of a node instead of calling 'children'.
+ 'cycle_func' is an optional function that will be called
+ when a cycle is detected.
+
+ This class does not get caught in node cycles caused, for example,
+ by C header file include loops.
+ """
+ def __init__(self, node, kids_func=get_children,
+ cycle_func=ignore_cycle,
+ eval_func=do_nothing):
+ self.kids_func = kids_func
+ self.cycle_func = cycle_func
+ self.eval_func = eval_func
+ node.wkids = copy.copy(kids_func(node, None))
+ self.stack = [node]
+ self.history = {} # used to efficiently detect and avoid cycles
+ self.history[node] = None
+
+ def next(self):
+ """Return the next node for this walk of the tree.
+
+ This function is intentionally iterative, not recursive,
+ to sidestep any issues of stack size limitations.
+ """
+
+ while self.stack:
+ if self.stack[-1].wkids:
+ node = self.stack[-1].wkids.pop(0)
+ if not self.stack[-1].wkids:
+ self.stack[-1].wkids = None
+ if self.history.has_key(node):
+ self.cycle_func(node, self.stack)
+ else:
+ node.wkids = copy.copy(self.kids_func(node, self.stack[-1]))
+ self.stack.append(node)
+ self.history[node] = None
+ else:
+ node = self.stack.pop()
+ del self.history[node]
+ if node:
+ if self.stack:
+ parent = self.stack[-1]
+ else:
+ parent = None
+ self.eval_func(node, parent)
+ return node
+ return None
+
+ def is_done(self):
+ return not self.stack
+
+
+arg2nodes_lookups = []
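+
+# NOTE (editor's sketch, not part of upstream SCons): driving the Walker class
+# above by hand. 'tgt' stands in for any Node; children come back before their
+# parent, which is the order a bottom-up traversal wants:
+#
+#     walker = Walker(tgt)
+#     order = []
+#     node = walker.next()
+#     while node is not None:
+#         order.append(str(node))
+#         node = walker.next()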
diff --git a/tools/scons/scons-local-1.2.0/SCons/Options/BoolOption.py b/tools/scons/scons-local-1.2.0/SCons/Options/BoolOption.py
new file mode 100644
index 000000000..c5fed0a14
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Options/BoolOption.py
@@ -0,0 +1,44 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Options/BoolOption.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """Place-holder for the old SCons.Options module hierarchy
+
+This is for backwards compatibility. The new equivalent is the Variables/
+class hierarchy. These will have deprecation warnings added (some day),
+and will then be removed entirely (some day).
+"""
+
+import SCons.Variables
+import SCons.Warnings
+
+warned = False
+
+def BoolOption(*args, **kw):
+ global warned
+ if not warned:
+ msg = "The BoolOption() function is deprecated; use the BoolVariable() function instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
+ warned = True
+ return apply(SCons.Variables.BoolVariable, args, kw)
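+
+# NOTE (editor's sketch, not part of upstream SCons): what the shim above
+# preserves. Both spellings are assumed to run inside an SConstruct, where
+# these names are provided by SCons.Script:
+#
+#     # deprecated spelling, still accepted through this module:
+#     opts = Options('custom.py')
+#     opts.Add(BoolOption('debug', 'build with debug symbols', 0))
+#
+#     # preferred replacement:
+#     vars = Variables('custom.py')
+#     vars.Add(BoolVariable('debug', 'build with debug symbols', 0))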
diff --git a/tools/scons/scons-local-1.2.0/SCons/Options/EnumOption.py b/tools/scons/scons-local-1.2.0/SCons/Options/EnumOption.py
new file mode 100644
index 000000000..4f50d01b8
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Options/EnumOption.py
@@ -0,0 +1,44 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Options/EnumOption.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """Place-holder for the old SCons.Options module hierarchy
+
+This is for backwards compatibility. The new equivalent is the Variables/
+class hierarchy. These will have deprecation warnings added (some day),
+and will then be removed entirely (some day).
+"""
+
+import SCons.Variables
+import SCons.Warnings
+
+warned = False
+
+def EnumOption(*args, **kw):
+ global warned
+ if not warned:
+ msg = "The EnumOption() function is deprecated; use the EnumVariable() function instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
+ warned = True
+ return apply(SCons.Variables.EnumVariable, args, kw)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Options/ListOption.py b/tools/scons/scons-local-1.2.0/SCons/Options/ListOption.py
new file mode 100644
index 000000000..b4cd923ad
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Options/ListOption.py
@@ -0,0 +1,44 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Options/ListOption.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """Place-holder for the old SCons.Options module hierarchy
+
+This is for backwards compatibility. The new equivalent is the Variables/
+class hierarchy. These will have deprecation warnings added (some day),
+and will then be removed entirely (some day).
+"""
+
+import SCons.Variables
+import SCons.Warnings
+
+warned = False
+
+def ListOption(*args, **kw):
+ global warned
+ if not warned:
+ msg = "The ListOption() function is deprecated; use the ListVariable() function instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
+ warned = True
+ return apply(SCons.Variables.ListVariable, args, kw)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Options/PackageOption.py b/tools/scons/scons-local-1.2.0/SCons/Options/PackageOption.py
new file mode 100644
index 000000000..7fcbe5f1d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Options/PackageOption.py
@@ -0,0 +1,44 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Options/PackageOption.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """Place-holder for the old SCons.Options module hierarchy
+
+This is for backwards compatibility. The new equivalent is the Variables/
+class hierarchy. These will have deprecation warnings added (some day),
+and will then be removed entirely (some day).
+"""
+
+import SCons.Variables
+import SCons.Warnings
+
+warned = False
+
+def PackageOption(*args, **kw):
+ global warned
+ if not warned:
+ msg = "The PackageOption() function is deprecated; use the PackageVariable() function instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
+ warned = True
+ return apply(SCons.Variables.PackageVariable, args, kw)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Options/PathOption.py b/tools/scons/scons-local-1.2.0/SCons/Options/PathOption.py
new file mode 100644
index 000000000..649fc45ea
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Options/PathOption.py
@@ -0,0 +1,70 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Options/PathOption.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """Place-holder for the old SCons.Options module hierarchy
+
+This is for backwards compatibility. The new equivalent is the Variables/
+class hierarchy. These will have deprecation warnings added (some day),
+and will then be removed entirely (some day).
+"""
+
+import SCons.Variables
+import SCons.Warnings
+
+warned = False
+
+class _PathOptionClass:
+ def warn(self):
+ global warned
+ if not warned:
+ msg = "The PathOption() function is deprecated; use the PathVariable() function instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
+ warned = True
+
+ def __call__(self, *args, **kw):
+ self.warn()
+ return apply(SCons.Variables.PathVariable, args, kw)
+
+ def PathAccept(self, *args, **kw):
+ self.warn()
+ return apply(SCons.Variables.PathVariable.PathAccept, args, kw)
+
+ def PathIsDir(self, *args, **kw):
+ self.warn()
+ return apply(SCons.Variables.PathVariable.PathIsDir, args, kw)
+
+ def PathIsDirCreate(self, *args, **kw):
+ self.warn()
+ return apply(SCons.Variables.PathVariable.PathIsDirCreate, args, kw)
+
+ def PathIsFile(self, *args, **kw):
+ self.warn()
+ return apply(SCons.Variables.PathVariable.PathIsFile, args, kw)
+
+ def PathExists(self, *args, **kw):
+ self.warn()
+ return apply(SCons.Variables.PathVariable.PathExists, args, kw)
+
+PathOption = _PathOptionClass()
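PathOption is published as an instance rather than a bare function so that the old validator attributes (PathOption.PathIsDir, PathOption.PathExists, and so on) keep resolving; a short sketch under that assumption, with 'prefix' and '/usr/local' as invented examples:

    # Hypothetical old-style usage that this wrapper keeps working.
    from SCons.Options import Options, PathOption

    opts = Options()
    opts.AddOptions(
        # __call__ warns once, then forwards to SCons.Variables.PathVariable();
        # the PathIsDir attribute forwards to PathVariable.PathIsDir.
        PathOption('prefix', 'installation prefix', '/usr/local',
                   PathOption.PathIsDir),
    )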
diff --git a/tools/scons/scons-local-1.2.0/SCons/Options/__init__.py b/tools/scons/scons-local-1.2.0/SCons/Options/__init__.py
new file mode 100644
index 000000000..3e41b8d63
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Options/__init__.py
@@ -0,0 +1,68 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Options/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """Place-holder for the old SCons.Options module hierarchy
+
+This is for backwards compatibility. The new equivalent is the Variables/
+class hierarchy. These will have deprecation warnings added (some day),
+and will then be removed entirely (some day).
+"""
+
+import SCons.Variables
+import SCons.Warnings
+
+from BoolOption import BoolOption # okay
+from EnumOption import EnumOption # okay
+from ListOption import ListOption # well...
+from PackageOption import PackageOption # well...
+from PathOption import PathOption # okay
+
+warned = False
+
+class Options(SCons.Variables.Variables):
+ def __init__(self, *args, **kw):
+ global warned
+ if not warned:
+ msg = "The Options class is deprecated; use the Variables class instead."
+ SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
+ warned = True
+ apply(SCons.Variables.Variables.__init__,
+ (self,) + args,
+ kw)
+
+ def AddOptions(self, *args, **kw):
+ return apply(SCons.Variables.Variables.AddVariables,
+ (self,) + args,
+ kw)
+
+ def UnknownOptions(self, *args, **kw):
+ return apply(SCons.Variables.Variables.UnknownVariables,
+ (self,) + args,
+ kw)
+
+ def FormatOptionHelpText(self, *args, **kw):
+ return apply(SCons.Variables.Variables.FormatVariableHelpText,
+ (self,) + args,
+ kw)
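In other words, each old method name simply delegates to the renamed method on SCons.Variables.Variables; a rough, illustrative correspondence (not part of this changeset):

    # Mapping preserved by the wrapper above:
    #   Options(...)                -> Variables(...)
    #   opts.AddOptions(...)        -> vars.AddVariables(...)
    #   opts.UnknownOptions()       -> vars.UnknownVariables()
    #   opts.FormatOptionHelpText() -> vars.FormatVariableHelpText()
    import SCons.Options
    import SCons.Variables

    old = SCons.Options.Options()      # warns with DeprecatedOptionsWarning once
    new = SCons.Variables.Variables()  # the spelling new SConstructs should use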
diff --git a/tools/scons/scons-local-1.2.0/SCons/PathList.py b/tools/scons/scons-local-1.2.0/SCons/PathList.py
new file mode 100644
index 000000000..8b877fa4f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/PathList.py
@@ -0,0 +1,226 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/PathList.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """SCons.PathList
+
+A module for handling lists of directory paths (the sort of things
+that get set as CPPPATH, LIBPATH, etc.) with as much caching of data and
+efficiency as we can while still keeping the evaluation delayed so that we
+Do the Right Thing (almost) regardless of how the variable is specified.
+
+"""
+
+import os
+import string
+
+import SCons.Memoize
+import SCons.Node
+import SCons.Util
+
+#
+# Variables to specify the different types of entries in a PathList object:
+#
+
+TYPE_STRING_NO_SUBST = 0 # string with no '$'
+TYPE_STRING_SUBST = 1 # string containing '$'
+TYPE_OBJECT = 2 # other object
+
+def node_conv(obj):
+ """
+ This is the "string conversion" routine that we have our substitutions
+ use to return Nodes, not strings. This relies on the fact that an
+ EntryProxy object has a get() method that returns the underlying
+ Node that it wraps, which is a bit of architectural dependence
+ that we might need to break or modify in the future in response to
+ additional requirements.
+ """
+ try:
+ get = obj.get
+ except AttributeError:
+ if isinstance(obj, SCons.Node.Node) or SCons.Util.is_Sequence( obj ):
+ result = obj
+ else:
+ result = str(obj)
+ else:
+ result = get()
+ return result
+
+class _PathList:
+ """
+ An actual PathList object.
+ """
+ def __init__(self, pathlist):
+ """
+ Initializes a PathList object, canonicalizing the input and
+ pre-processing it for quicker substitution later.
+
+ The stored representation of the PathList is a list of tuples
+ containing (type, value), where the "type" is one of the TYPE_*
+ variables defined above. We distinguish between:
+
+ strings that contain no '$' and therefore need no
+ delayed-evaluation string substitution (we expect that there
+ will be many of these and that we therefore get a pretty
+ big win from avoiding string substitution)
+
+ strings that contain '$' and therefore need substitution
+ (the hard case is things like '${TARGET.dir}/include',
+ which require re-evaluation for every target + source)
+
+ other objects (which may be something like an EntryProxy
+ that needs a method called to return a Node)
+
+ Pre-identifying the type of each element in the PathList up-front
+ and storing the type in the list of tuples is intended to reduce
+ the amount of calculation when we actually do the substitution
+ over and over for each target.
+ """
+ if SCons.Util.is_String(pathlist):
+ pathlist = string.split(pathlist, os.pathsep)
+ elif not SCons.Util.is_Sequence(pathlist):
+ pathlist = [pathlist]
+
+ pl = []
+ for p in pathlist:
+ try:
+ index = string.find(p, '$')
+ except (AttributeError, TypeError):
+ type = TYPE_OBJECT
+ else:
+ if index == -1:
+ type = TYPE_STRING_NO_SUBST
+ else:
+ type = TYPE_STRING_SUBST
+ pl.append((type, p))
+
+ self.pathlist = tuple(pl)
+
+ def __len__(self): return len(self.pathlist)
+
+ def __getitem__(self, i): return self.pathlist[i]
+
+ def subst_path(self, env, target, source):
+ """
+ Performs construction variable substitution on a pre-digested
+ PathList for a specific target and source.
+ """
+ result = []
+ for type, value in self.pathlist:
+ if type == TYPE_STRING_SUBST:
+ value = env.subst(value, target=target, source=source,
+ conv=node_conv)
+ if SCons.Util.is_Sequence(value):
+ result.extend(value)
+ continue
+
+ elif type == TYPE_OBJECT:
+ value = node_conv(value)
+ if value:
+ result.append(value)
+ return tuple(result)
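A small, self-contained illustration of the pre-digestion described above, assuming the scons-local package is importable; the path strings are arbitrary:

    # Each entry is stored as a (type, value) tuple before any substitution.
    import SCons.PathList as PL

    pl = PL._PathList(['include', '${BUILD}/include'])
    for t, v in pl.pathlist:
        # prints '0 include'          (TYPE_STRING_NO_SUBST) and
        #        '1 ${BUILD}/include' (TYPE_STRING_SUBST)
        print("%s %s" % (t, v))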
+
+
+class PathListCache:
+ """
+ A class to handle caching of PathList lookups.
+
+ This class gets instantiated once and then deleted from the namespace,
+ so it's used as a Singleton (although we don't enforce that in the
+ usual Pythonic ways). We could have just made the cache a dictionary
+ in the module namespace, but putting it in this class allows us to
+ use the same Memoizer pattern that we use elsewhere to count cache
+ hits and misses, which is very valuable.
+
+ Lookup keys in the cache are computed by the _PathList_key() method.
+ Cache lookup should be quick, so we don't spend cycles canonicalizing
+ all forms of the same lookup key. For example, 'x:y' and ['x',
+ 'y'] logically represent the same list, but we don't bother to
+ split string representations and treat those two equivalently.
+ (Note, however, that we do treat lists and tuples the same.)
+
+ The main type of duplication we're trying to catch will come from
+ looking up the same path list from two different clones of the
+ same construction environment. That is, given
+
+ env2 = env1.Clone()
+
+ both env1 and env2 will have the same CPPPATH value, and we can
+ cheaply avoid re-parsing both values of CPPPATH by using the
+ common value from this cache.
+ """
+ if SCons.Memoize.use_memoizer:
+ __metaclass__ = SCons.Memoize.Memoized_Metaclass
+
+ memoizer_counters = []
+
+ def __init__(self):
+ self._memo = {}
+
+ def _PathList_key(self, pathlist):
+ """
+ Returns the key for memoization of PathLists.
+
+ Note that we want this to be pretty quick, so we don't completely
+ canonicalize all forms of the same list. For example,
+ 'dir1:$ROOT/dir2' and ['$ROOT/dir1', 'dir2'] may logically
+ represent the same list if you're executing from $ROOT, but
+ we're not going to bother splitting strings into path elements,
+ or massaging strings into Nodes, to identify that equivalence.
+ We just want to eliminate obvious redundancy from the normal
+ case of re-using exactly the same cloned value for a path.
+ """
+ if SCons.Util.is_Sequence(pathlist):
+ pathlist = tuple(SCons.Util.flatten(pathlist))
+ return pathlist
+
+ memoizer_counters.append(SCons.Memoize.CountDict('PathList', _PathList_key))
+
+ def PathList(self, pathlist):
+ """
+ Returns the cached _PathList object for the specified pathlist,
+ creating and caching a new object as necessary.
+ """
+ pathlist = self._PathList_key(pathlist)
+ try:
+ memo_dict = self._memo['PathList']
+ except KeyError:
+ memo_dict = {}
+ self._memo['PathList'] = memo_dict
+ else:
+ try:
+ return memo_dict[pathlist]
+ except KeyError:
+ pass
+
+ result = _PathList(pathlist)
+
+ memo_dict[pathlist] = result
+
+ return result
+
+PathList = PathListCache().PathList
+
+
+del PathListCache
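A sketch of the caching behaviour this sets up, assuming a POSIX host (where os.pathsep is ':'); the directory names are arbitrary:

    import SCons.PathList as PL

    a = PL.PathList(['dir1', '$ROOT/dir2'])
    b = PL.PathList(('dir1', '$ROOT/dir2'))   # lists and tuples share a cache key
    c = PL.PathList('dir1:$ROOT/dir2')        # the string form is not canonicalized

    assert a is b      # same cached _PathList object
    assert a is not c  # logically equal, but keyed (and parsed) separately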
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/__init__.py b/tools/scons/scons-local-1.2.0/SCons/Platform/__init__.py
new file mode 100644
index 000000000..12158650b
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/__init__.py
@@ -0,0 +1,216 @@
+"""SCons.Platform
+
+SCons platform selection.
+
+This looks for modules that define a callable object that can modify a
+construction environment as appropriate for a given platform.
+
+Note that we take a more simplistic view of "platform" than Python does.
+We're looking for a single string that determines a set of
+tool-independent variables with which to initialize a construction
+environment. Consequently, we'll examine both sys.platform and os.name
+(and anything else that might come into play) in order to return some
+specification which is unique enough for our purposes.
+
+Note that because this subsystem just *selects* a callable that can
+modify a construction environment, it's possible for people to define
+their own "platform specification" in an arbitrary callable function.
+No one needs to use or tie in to this subsystem in order to roll
+their own platform definition.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+import imp
+import os
+import string
+import sys
+import tempfile
+
+import SCons.Errors
+import SCons.Tool
+
+def platform_default():
+ """Return the platform string for our execution environment.
+
+ The returned value should map to one of the SCons/Platform/*.py
+ files. Since we're architecture independent, though, we don't
+ care about the machine architecture.
+ """
+ osname = os.name
+ if osname == 'java':
+ osname = os._osType
+ if osname == 'posix':
+ if sys.platform == 'cygwin':
+ return 'cygwin'
+ elif string.find(sys.platform, 'irix') != -1:
+ return 'irix'
+ elif string.find(sys.platform, 'sunos') != -1:
+ return 'sunos'
+ elif string.find(sys.platform, 'hp-ux') != -1:
+ return 'hpux'
+ elif string.find(sys.platform, 'aix') != -1:
+ return 'aix'
+ elif string.find(sys.platform, 'darwin') != -1:
+ return 'darwin'
+ else:
+ return 'posix'
+ elif os.name == 'os2':
+ return 'os2'
+ else:
+ return sys.platform
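A rough picture of the mapping this function implements, plus how it might be called; the outcomes are host-dependent:

    #   os.name 'posix' + sys.platform 'linux2'  -> 'posix'
    #   os.name 'posix' + sys.platform 'darwin'  -> 'darwin'
    #   os.name 'posix' + sys.platform 'cygwin'  -> 'cygwin'
    #   os.name 'nt'    + sys.platform 'win32'   -> 'win32' (falls through to else)
    import SCons.Platform
    print(SCons.Platform.platform_default())   # e.g. 'posix' on a Linux box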
+
+def platform_module(name = platform_default()):
+ """Return the imported module for the platform.
+
+ This looks for a module name that matches the specified argument.
+ If the name is unspecified, we fetch the appropriate default for
+ our execution environment.
+ """
+ full_name = 'SCons.Platform.' + name
+ if not sys.modules.has_key(full_name):
+ if os.name == 'java':
+ eval(full_name)
+ else:
+ try:
+ file, path, desc = imp.find_module(name,
+ sys.modules['SCons.Platform'].__path__)
+ try:
+ mod = imp.load_module(full_name, file, path, desc)
+ finally:
+ if file:
+ file.close()
+ except ImportError:
+ try:
+ import zipimport
+ importer = zipimport.zipimporter( sys.modules['SCons.Platform'].__path__[0] )
+ mod = importer.load_module(full_name)
+ except ImportError:
+ raise SCons.Errors.UserError, "No platform named '%s'" % name
+ setattr(SCons.Platform, name, mod)
+ return sys.modules[full_name]
+
+def DefaultToolList(platform, env):
+ """Select a default tool list for the specified platform.
+ """
+ return SCons.Tool.tool_list(platform, env)
+
+class PlatformSpec:
+ def __init__(self, name):
+ self.name = name
+
+ def __str__(self):
+ return self.name
+
+class TempFileMunge:
+ """A callable class. You can set an Environment variable to this,
+ then call it with a string argument, then it will perform temporary
+ file substitution on it. This is used to circumvent the long command
+ line limitation.
+
+ Example usage:
+ env["TEMPFILE"] = TempFileMunge
+ env["LINKCOM"] = "${TEMPFILE('$LINK $TARGET $SOURCES')}"
+
+ By default, the name of the temporary file used begins with a
+ prefix of '@'. This may be configured for other tool chains by
+ setting '$TEMPFILEPREFIX'.
+
+ env["TEMPFILEPREFIX"] = '-@' # diab compiler
+ env["TEMPFILEPREFIX"] = '-via' # arm tool chain
+ """
+ def __init__(self, cmd):
+ self.cmd = cmd
+
+ def __call__(self, target, source, env, for_signature):
+ if for_signature:
+ return self.cmd
+ cmd = env.subst_list(self.cmd, 0, target, source)[0]
+ try:
+ maxline = int(env.subst('$MAXLINELENGTH'))
+ except ValueError:
+ maxline = 2048
+
+ if (reduce(lambda x, y: x + len(y), cmd, 0) + len(cmd)) <= maxline:
+ return self.cmd
+
+ # We do a normpath because mktemp() has what appears to be
+ # a bug in Windows that will use a forward slash as a path
+ # delimiter. Windows's link mistakes that for a command line
+ # switch and barfs.
+ #
+ # We use the .lnk suffix for the benefit of the Phar Lap
+ # linkloc linker, which likes to append an .lnk suffix if
+ # none is given.
+ tmp = os.path.normpath(tempfile.mktemp('.lnk'))
+ native_tmp = SCons.Util.get_native_path(tmp)
+
+ if env['SHELL'] and env['SHELL'] == 'sh':
+ # The sh shell will try to escape the backslashes in the
+ # path, so unescape them.
+ native_tmp = string.replace(native_tmp, '\\', r'\\\\')
+ # In Cygwin, we want to use rm to delete the temporary
+ # file, because del does not exist in the sh shell.
+ rm = env.Detect('rm') or 'del'
+ else:
+ # Don't use 'rm' if the shell is not sh, because rm won't
+ # work with the Windows shells (cmd.exe or command.com) or
+ # Windows path names.
+ rm = 'del'
+
+ prefix = env.subst('$TEMPFILEPREFIX')
+ if not prefix:
+ prefix = '@'
+
+ args = map(SCons.Subst.quote_spaces, cmd[1:])
+ open(tmp, 'w').write(string.join(args, " ") + "\n")
+ # XXX Using the SCons.Action.print_actions value directly
+ # like this is bogus, but expedient. This class should
+ # really be rewritten as an Action that defines the
+ # __call__() and strfunction() methods and lets the
+ # normal action-execution logic handle whether or not to
+ # print/execute the action. The problem, though, is all
+ # of that is decided before we execute this method as
+ # part of expanding the $TEMPFILE construction variable.
+ # Consequently, refactoring this will have to wait until
+ # we get more flexible with allowing Actions to exist
+ # independently and get strung together arbitrarily like
+ # Ant tasks. In the meantime, it's going to be more
+ # user-friendly to not let obsession with architectural
+ # purity get in the way of just being helpful, so we'll
+ # reach into SCons.Action directly.
+ if SCons.Action.print_actions:
+ print("Using tempfile "+native_tmp+" for command line:\n"+
+ str(cmd[0]) + " " + string.join(args," "))
+ return [ cmd[0], prefix + native_tmp + '\n' + rm, native_tmp ]
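A sketch of how a tool chain would wire this into a command, inside an SConstruct; the linker command line and the 2048-character limit here are illustrative, not taken from this changeset:

    from SCons.Platform import TempFileMunge

    env = Environment()
    env['TEMPFILE'] = TempFileMunge
    env['TEMPFILEPREFIX'] = '@'        # e.g. '-via' for an ARM tool chain
    env['MAXLINELENGTH'] = 2048        # longer commands go through a temp file
    env['LINKCOM'] = "${TEMPFILE('$LINK -o $TARGET $SOURCES $LINKFLAGS')}"
    # When the expanded command exceeds MAXLINELENGTH, the arguments are
    # written to a temporary .lnk file and the command becomes: $LINK @<tmpfile>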
+
+def Platform(name = platform_default()):
+ """Select a canned Platform specification.
+ """
+ module = platform_module(name)
+ spec = PlatformSpec(name)
+ spec.__call__ = module.generate
+ return spec
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/aix.py b/tools/scons/scons-local-1.2.0/SCons/Platform/aix.py
new file mode 100644
index 000000000..c8cb7e89f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/aix.py
@@ -0,0 +1,65 @@
+"""engine.SCons.Platform.aix
+
+Platform-specific initialization for IBM AIX systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/aix.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import string
+
+import posix
+
+def get_xlc(env, xlc=None, xlc_r=None, packages=[]):
+ # Use the AIX package installer tool lslpp to figure out where a
+ # given xl* compiler is installed and what version it is.
+ xlcPath = None
+ xlcVersion = None
+
+ if xlc is None:
+ xlc = env.get('CC', 'xlc')
+ if xlc_r is None:
+ xlc_r = xlc + '_r'
+ for package in packages:
+ cmd = "lslpp -fc " + package + " 2>/dev/null | egrep '" + xlc + "([^-_a-zA-Z0-9].*)?$'"
+ line = os.popen(cmd).readline()
+ if line:
+ v, p = string.split(line, ':')[1:3]
+ xlcVersion = string.split(v)[1]
+ xlcPath = string.split(p)[0]
+ xlcPath = xlcPath[:xlcPath.rindex('/')]
+ break
+ return (xlcPath, xlc, xlc_r, xlcVersion)
+
+def generate(env):
+ posix.generate(env)
+ #Based on AIX 5.2: ARG_MAX=24576 - 3000 for environment expansion
+ env['MAXLINELENGTH'] = 21576
+
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/cygwin.py b/tools/scons/scons-local-1.2.0/SCons/Platform/cygwin.py
new file mode 100644
index 000000000..f51eeb16e
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/cygwin.py
@@ -0,0 +1,49 @@
+"""SCons.Platform.cygwin
+
+Platform-specific initialization for Cygwin systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/cygwin.py 3842 2008/12/20 22:59:52 scons"
+
+import posix
+from SCons.Platform import TempFileMunge
+
+def generate(env):
+ posix.generate(env)
+
+ env['PROGPREFIX'] = ''
+ env['PROGSUFFIX'] = '.exe'
+ env['SHLIBPREFIX'] = ''
+ env['SHLIBSUFFIX'] = '.dll'
+ env['LIBPREFIXES'] = [ '$LIBPREFIX', '$SHLIBPREFIX' ]
+ env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ]
+ env['TEMPFILE'] = TempFileMunge
+ env['TEMPFILEPREFIX'] = '@'
+ env['MAXLINELENGTH'] = 2048
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/darwin.py b/tools/scons/scons-local-1.2.0/SCons/Platform/darwin.py
new file mode 100644
index 000000000..94365465c
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/darwin.py
@@ -0,0 +1,40 @@
+"""engine.SCons.Platform.darwin
+
+Platform-specific initialization for Mac OS X systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/darwin.py 3842 2008/12/20 22:59:52 scons"
+
+import posix
+
+def generate(env):
+ posix.generate(env)
+ env['SHLIBSUFFIX'] = '.dylib'
+ env['ENV']['PATH'] = env['ENV']['PATH'] + ':/sw/bin'
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/hpux.py b/tools/scons/scons-local-1.2.0/SCons/Platform/hpux.py
new file mode 100644
index 000000000..2bd468b71
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/hpux.py
@@ -0,0 +1,40 @@
+"""engine.SCons.Platform.hpux
+
+Platform-specific initialization for HP-UX systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/hpux.py 3842 2008/12/20 22:59:52 scons"
+
+import posix
+
+def generate(env):
+ posix.generate(env)
+ #Based on HP-UX11i: ARG_MAX=2048000 - 3000 for environment expansion
+ env['MAXLINELENGTH'] = 2045000
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/irix.py b/tools/scons/scons-local-1.2.0/SCons/Platform/irix.py
new file mode 100644
index 000000000..b70481db2
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/irix.py
@@ -0,0 +1,38 @@
+"""SCons.Platform.irix
+
+Platform-specific initialization for SGI IRIX systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/irix.py 3842 2008/12/20 22:59:52 scons"
+
+import posix
+
+def generate(env):
+ posix.generate(env)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/os2.py b/tools/scons/scons-local-1.2.0/SCons/Platform/os2.py
new file mode 100644
index 000000000..803d890d9
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/os2.py
@@ -0,0 +1,49 @@
+"""SCons.Platform.os2
+
+Platform-specific initialization for OS/2 systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/os2.py 3842 2008/12/20 22:59:52 scons"
+
+def generate(env):
+ if not env.has_key('ENV'):
+ env['ENV'] = {}
+ env['OBJPREFIX'] = ''
+ env['OBJSUFFIX'] = '.obj'
+ env['SHOBJPREFIX'] = '$OBJPREFIX'
+ env['SHOBJSUFFIX'] = '$OBJSUFFIX'
+ env['PROGPREFIX'] = ''
+ env['PROGSUFFIX'] = '.exe'
+ env['LIBPREFIX'] = ''
+ env['LIBSUFFIX'] = '.lib'
+ env['SHLIBPREFIX'] = ''
+ env['SHLIBSUFFIX'] = '.dll'
+ env['LIBPREFIXES'] = '$LIBPREFIX'
+ env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ]
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/posix.py b/tools/scons/scons-local-1.2.0/SCons/Platform/posix.py
new file mode 100644
index 000000000..2a7c54420
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/posix.py
@@ -0,0 +1,258 @@
+"""SCons.Platform.posix
+
+Platform-specific initialization for POSIX (Linux, UNIX, etc.) systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/posix.py 3842 2008/12/20 22:59:52 scons"
+
+import errno
+import os
+import os.path
+import string
+import subprocess
+import sys
+import select
+
+import SCons.Util
+from SCons.Platform import TempFileMunge
+
+exitvalmap = {
+ 2 : 127,
+ 13 : 126,
+}
+
+def escape(arg):
+ "escape shell special characters"
+ slash = '\\'
+ special = '"$()'
+
+ arg = string.replace(arg, slash, slash+slash)
+ for c in special:
+ arg = string.replace(arg, c, slash+c)
+
+ return '"' + arg + '"'
+
+def exec_system(l, env):
+ stat = os.system(string.join(l))
+ if stat & 0xff:
+ return stat | 0x80
+ return stat >> 8
+
+def exec_spawnvpe(l, env):
+ stat = os.spawnvpe(os.P_WAIT, l[0], l, env)
+ # os.spawnvpe() returns the actual exit code, not the encoding
+ # returned by os.waitpid() or os.system().
+ return stat
+
+def exec_fork(l, env):
+ pid = os.fork()
+ if not pid:
+ # Child process.
+ exitval = 127
+ try:
+ os.execvpe(l[0], l, env)
+ except OSError, e:
+ exitval = exitvalmap.get(e[0], e[0])
+ sys.stderr.write("scons: %s: %s\n" % (l[0], e[1]))
+ os._exit(exitval)
+ else:
+ # Parent process.
+ pid, stat = os.waitpid(pid, 0)
+ if stat & 0xff:
+ return stat | 0x80
+ return stat >> 8
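The `stat & 0xff` / `stat >> 8` dance above converts the raw wait status from os.waitpid()/os.system() (low byte: signal, high byte: exit code) into a single shell-style value; a small self-contained sketch of the same decoding:

    def decode_wait_status(stat):
        if stat & 0xff:        # child was killed by a signal
            return stat | 0x80
        return stat >> 8       # normal termination: the real exit code

    assert decode_wait_status(0x0100) == 1     # exit(1)
    assert decode_wait_status(0x0009) == 0x89  # killed by SIGKILL (signal 9)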
+
+def _get_env_command(sh, escape, cmd, args, env):
+ s = string.join(args)
+ if env:
+ l = ['env', '-'] + \
+ map(lambda t, e=escape: e(t[0])+'='+e(t[1]), env.items()) + \
+ [sh, '-c', escape(s)]
+ s = string.join(l)
+ return s
+
+def env_spawn(sh, escape, cmd, args, env):
+ return exec_system([_get_env_command( sh, escape, cmd, args, env)], env)
+
+def spawnvpe_spawn(sh, escape, cmd, args, env):
+ return exec_spawnvpe([sh, '-c', string.join(args)], env)
+
+def fork_spawn(sh, escape, cmd, args, env):
+ return exec_fork([sh, '-c', string.join(args)], env)
+
+def process_cmd_output(cmd_stdout, cmd_stderr, stdout, stderr):
+ stdout_eof = stderr_eof = 0
+ while not (stdout_eof and stderr_eof):
+ try:
+ (i,o,e) = select.select([cmd_stdout, cmd_stderr], [], [])
+ if cmd_stdout in i:
+ str = cmd_stdout.read()
+ if len(str) == 0:
+ stdout_eof = 1
+ elif stdout != None:
+ stdout.write(str)
+ if cmd_stderr in i:
+ str = cmd_stderr.read()
+ if len(str) == 0:
+ #sys.__stderr__.write( "stderr_eof=1\n" )
+ stderr_eof = 1
+ else:
+ #sys.__stderr__.write( "str(stderr) = %s\n" % str )
+ stderr.write(str)
+ except select.error, (_errno, _strerror):
+ if _errno != errno.EINTR:
+ raise
+
+def exec_popen3(l, env, stdout, stderr):
+ proc = subprocess.Popen(string.join(l),
+ stdout=stdout,
+ stderr=stderr,
+ shell=True)
+ stat = proc.wait()
+ if stat & 0xff:
+ return stat | 0x80
+ return stat >> 8
+
+def exec_piped_fork(l, env, stdout, stderr):
+ # spawn using fork / exec and providing a pipe for the command's
+ # stdout / stderr stream
+ if stdout != stderr:
+ (rFdOut, wFdOut) = os.pipe()
+ (rFdErr, wFdErr) = os.pipe()
+ else:
+ (rFdOut, wFdOut) = os.pipe()
+ rFdErr = rFdOut
+ wFdErr = wFdOut
+ # do the fork
+ pid = os.fork()
+ if not pid:
+ # Child process
+ os.close( rFdOut )
+ if rFdOut != rFdErr:
+ os.close( rFdErr )
+ os.dup2( wFdOut, 1 ) # is there some symbolic way to do that ?
+ os.dup2( wFdErr, 2 )
+ os.close( wFdOut )
+ if stdout != stderr:
+ os.close( wFdErr )
+ exitval = 127
+ try:
+ os.execvpe(l[0], l, env)
+ except OSError, e:
+ exitval = exitvalmap.get(e[0], e[0])
+ stderr.write("scons: %s: %s\n" % (l[0], e[1]))
+ os._exit(exitval)
+ else:
+ # Parent process
+ pid, stat = os.waitpid(pid, 0)
+ os.close( wFdOut )
+ if stdout != stderr:
+ os.close( wFdErr )
+ childOut = os.fdopen( rFdOut )
+ if stdout != stderr:
+ childErr = os.fdopen( rFdErr )
+ else:
+ childErr = childOut
+ process_cmd_output(childOut, childErr, stdout, stderr)
+ os.close( rFdOut )
+ if stdout != stderr:
+ os.close( rFdErr )
+ if stat & 0xff:
+ return stat | 0x80
+ return stat >> 8
+
+def piped_env_spawn(sh, escape, cmd, args, env, stdout, stderr):
+ # spawn using Popen3 combined with the env command
+ # the command name and the command's stdout is written to stdout
+ # the command's stderr is written to stderr
+ return exec_popen3([_get_env_command(sh, escape, cmd, args, env)],
+ env, stdout, stderr)
+
+def piped_fork_spawn(sh, escape, cmd, args, env, stdout, stderr):
+ # spawn using fork / exec and providing a pipe for the command's
+ # stdout / stderr stream
+ return exec_piped_fork([sh, '-c', string.join(args)],
+ env, stdout, stderr)
+
+
+
+def generate(env):
+ # If os.spawnvpe() exists, we use it to spawn commands. Otherwise
+ # if the env utility exists, we use os.system() to spawn commands,
+ # finally we fall back on os.fork()/os.exec().
+ #
+ # os.spawnvpe() is preferred because it is the most efficient. But
+ # for Python versions without it, os.system() is preferred because it
+ # is claimed that it works better with threads (i.e. -j) and is more
+ # efficient than forking Python.
+ #
+ # NB: Other people on the scons-users mailing list have claimed that
+ # os.fork()/os.exec() works better than os.system(). There may just
+ # not be a default that works best for all users.
+
+ if os.__dict__.has_key('spawnvpe'):
+ spawn = spawnvpe_spawn
+ elif env.Detect('env'):
+ spawn = env_spawn
+ else:
+ spawn = fork_spawn
+
+ if env.Detect('env'):
+ pspawn = piped_env_spawn
+ else:
+ pspawn = piped_fork_spawn
+
+ if not env.has_key('ENV'):
+ env['ENV'] = {}
+ env['ENV']['PATH'] = os.environ.get("PATH")
+ env['OBJPREFIX'] = ''
+ env['OBJSUFFIX'] = '.o'
+ env['SHOBJPREFIX'] = '$OBJPREFIX'
+ env['SHOBJSUFFIX'] = '$OBJSUFFIX'
+ env['PROGPREFIX'] = ''
+ env['PROGSUFFIX'] = ''
+ env['LIBPREFIX'] = 'lib'
+ env['LIBSUFFIX'] = '.a'
+ env['SHLIBPREFIX'] = '$LIBPREFIX'
+ env['SHLIBSUFFIX'] = '.so'
+ env['LIBPREFIXES'] = [ '$LIBPREFIX' ]
+ env['LIBSUFFIXES'] = [ '$LIBSUFFIX', '$SHLIBSUFFIX' ]
+ env['PSPAWN'] = pspawn
+ env['SPAWN'] = spawn
+ env['SHELL'] = 'sh'
+ env['ESCAPE'] = escape
+ env['TEMPFILE'] = TempFileMunge
+ env['TEMPFILEPREFIX'] = '@'
+ #Based on LINUX: ARG_MAX=131072 - 3000 for environment expansion
+ #Note: specific platforms might raise or lower this value
+ env['MAXLINELENGTH'] = 128072
+
+ # This platform supports RPATH specifications.
+ env['__RPATH'] = '$_RPATH'
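SCons later calls the selected spawner as env['SPAWN'](sh, escape, cmd, args, env), with sh taken from $SHELL, escape from $ESCAPE, args as the argv list and env as the dictionary in env['ENV']; calling one of them directly, on a POSIX host with made-up values, looks roughly like:

    from SCons.Platform.posix import spawnvpe_spawn, escape

    status = spawnvpe_spawn('sh', escape, 'echo',
                            ['echo', 'hello'],
                            {'PATH': '/bin:/usr/bin'})
    print(status)   # 0 if /bin/sh ran 'echo hello' successfully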
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/sunos.py b/tools/scons/scons-local-1.2.0/SCons/Platform/sunos.py
new file mode 100644
index 000000000..03435c691
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/sunos.py
@@ -0,0 +1,44 @@
+"""engine.SCons.Platform.sunos
+
+Platform-specific initialization for Sun systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/sunos.py 3842 2008/12/20 22:59:52 scons"
+
+import posix
+
+def generate(env):
+ posix.generate(env)
+ # Based on sunSparc 8:32bit
+ # ARG_MAX=1048320 - 3000 for environment expansion
+ env['MAXLINELENGTH'] = 1045320
+ env['PKGINFO'] = 'pkginfo'
+ env['PKGCHK'] = '/usr/sbin/pkgchk'
+ env['ENV']['PATH'] = env['ENV']['PATH'] + ':/opt/SUNWspro/bin:/usr/ccs/bin'
diff --git a/tools/scons/scons-local-1.2.0/SCons/Platform/win32.py b/tools/scons/scons-local-1.2.0/SCons/Platform/win32.py
new file mode 100644
index 000000000..3ec0a526d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Platform/win32.py
@@ -0,0 +1,324 @@
+"""SCons.Platform.win32
+
+Platform-specific initialization for Win32 systems.
+
+There normally shouldn't be any need to import this module directly. It
+will usually be imported through the generic SCons.Platform.Platform()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Platform/win32.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+import string
+import sys
+import tempfile
+
+from SCons.Platform.posix import exitvalmap
+from SCons.Platform import TempFileMunge
+import SCons.Util
+
+
+
+try:
+ import msvcrt
+ import win32api
+ import win32con
+
+ msvcrt.get_osfhandle
+ win32api.SetHandleInformation
+ win32con.HANDLE_FLAG_INHERIT
+except ImportError:
+ parallel_msg = \
+ "you do not seem to have the pywin32 extensions installed;\n" + \
+ "\tparallel (-j) builds may not work reliably with open Python files."
+except AttributeError:
+ parallel_msg = \
+ "your pywin32 extensions do not support file handle operations;\n" + \
+ "\tparallel (-j) builds may not work reliably with open Python files."
+else:
+ parallel_msg = None
+
+ import __builtin__
+
+ _builtin_file = __builtin__.file
+ _builtin_open = __builtin__.open
+
+ def _scons_file(*args, **kw):
+ fp = apply(_builtin_file, args, kw)
+ win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()),
+ win32con.HANDLE_FLAG_INHERIT,
+ 0)
+ return fp
+
+ def _scons_open(*args, **kw):
+ fp = apply(_builtin_open, args, kw)
+ win32api.SetHandleInformation(msvcrt.get_osfhandle(fp.fileno()),
+ win32con.HANDLE_FLAG_INHERIT,
+ 0)
+ return fp
+
+ __builtin__.file = _scons_file
+ __builtin__.open = _scons_open
+
+
+
+# The upshot of all this is that, if you are using Python 1.5.2,
+# you had better have cmd or command.com in your PATH when you run
+# scons.
+
+def piped_spawn(sh, escape, cmd, args, env, stdout, stderr):
+ # There is no direct way to capture a child's output via os.spawnve()
+ # in Python; what we do here should work for most cases:
+ # in case stdout (stderr) is not already redirected to a file,
+ # we redirect it into a temporary file tmpFileStdout
+ # (tmpFileStderr) and copy the contents of that file to the
+ # stdout (stderr) stream given in the argument.
+ if not sh:
+ sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n")
+ return 127
+ else:
+ # one temporary file for stdout and stderr
+ tmpFileStdout = os.path.normpath(tempfile.mktemp())
+ tmpFileStderr = os.path.normpath(tempfile.mktemp())
+
+ # check if output is redirected
+ stdoutRedirected = 0
+ stderrRedirected = 0
+ for arg in args:
+ # are there other ways stdout could be redirected?
+ if (string.find( arg, ">", 0, 1 ) != -1 or
+ string.find( arg, "1>", 0, 2 ) != -1):
+ stdoutRedirected = 1
+ # are there other ways stderr could be redirected?
+ if string.find( arg, "2>", 0, 2 ) != -1:
+ stderrRedirected = 1
+
+ # redirect output of non-redirected streams to our tempfiles
+ if stdoutRedirected == 0:
+ args.append(">" + str(tmpFileStdout))
+ if stderrRedirected == 0:
+ args.append("2>" + str(tmpFileStderr))
+
+ # actually do the spawn
+ try:
+ args = [sh, '/C', escape(string.join(args)) ]
+ ret = os.spawnve(os.P_WAIT, sh, args, env)
+ except OSError, e:
+ # catch any error
+ try:
+ ret = exitvalmap[e[0]]
+ except KeyError:
+ sys.stderr.write("scons: unknown OSError exception code %d - %s: %s\n" % (e[0], cmd, e[1]))
+ if stderr != None:
+ stderr.write("scons: %s: %s\n" % (cmd, e[1]))
+ # copy child output from tempfiles to our streams
+ # and do clean up stuff
+ if stdout != None and stdoutRedirected == 0:
+ try:
+ stdout.write(open( tmpFileStdout, "r" ).read())
+ os.remove( tmpFileStdout )
+ except (IOError, OSError):
+ pass
+
+ if stderr != None and stderrRedirected == 0:
+ try:
+ stderr.write(open( tmpFileStderr, "r" ).read())
+ os.remove( tmpFileStderr )
+ except (IOError, OSError):
+ pass
+ return ret
+
+def exec_spawn(l, env):
+ try:
+ result = os.spawnve(os.P_WAIT, l[0], l, env)
+ except OSError, e:
+ try:
+ result = exitvalmap[e[0]]
+ sys.stderr.write("scons: %s: %s\n" % (l[0], e[1]))
+ except KeyError:
+ result = 127
+ if len(l) > 2:
+ if len(l[2]) < 1000:
+ command = string.join(l[0:3])
+ else:
+ command = l[0]
+ else:
+ command = l[0]
+ sys.stderr.write("scons: unknown OSError exception code %d - '%s': %s\n" % (e[0], command, e[1]))
+ return result
+
+def spawn(sh, escape, cmd, args, env):
+ if not sh:
+ sys.stderr.write("scons: Could not find command interpreter, is it in your PATH?\n")
+ return 127
+ return exec_spawn([sh, '/C', escape(string.join(args))], env)
+
+# Windows does not allow special characters in file names anyway, so no
+# need for a complex escape function, we will just quote the arg, except
+# that "cmd /c" requires that if an argument ends with a backslash it
+# needs to be escaped so as not to interfere with closing double quote
+# that we add.
+def escape(x):
+ if x[-1] == '\\':
+ x = x + '\\'
+ return '"' + x + '"'
+
+# Get the windows system directory name
+def get_system_root():
+ # A reasonable default if we can't read the registry
+ try:
+ val = os.environ['SYSTEMROOT']
+ except KeyError:
+ val = "C:/WINDOWS"
+ pass
+
+ # First see if we can look in the registry...
+ if SCons.Util.can_read_reg:
+ try:
+ # Look for Windows NT system root
+ k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
+ 'Software\\Microsoft\\Windows NT\\CurrentVersion')
+ val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot')
+ except SCons.Util.RegError:
+ try:
+ # Okay, try the Windows 9x system root
+ k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
+ 'Software\\Microsoft\\Windows\\CurrentVersion')
+ val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot')
+ except KeyboardInterrupt:
+ raise
+ except:
+ pass
+ return val
+
+# Get the location of the program files directory
+def get_program_files_dir():
+ # Now see if we can look in the registry...
+ val = ''
+ if SCons.Util.can_read_reg:
+ try:
+ # Look for Windows Program Files directory
+ k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
+ 'Software\\Microsoft\\Windows\\CurrentVersion')
+ val, tok = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir')
+ except SCons.Util.RegError:
+ val = ''
+ pass
+
+ if val == '':
+ # A reasonable default if we can't read the registry
+ # (Actually, it's pretty reasonable even if we can :-)
+ val = os.path.join(os.path.dirname(get_system_root()),"Program Files")
+
+ return val
+
+def generate(env):
+ # Attempt to find cmd.exe (for WinNT/2k/XP) or
+ # command.com for Win9x
+ cmd_interp = ''
+ # First see if we can look in the registry...
+ if SCons.Util.can_read_reg:
+ try:
+ # Look for Windows NT system root
+ k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
+ 'Software\\Microsoft\\Windows NT\\CurrentVersion')
+ val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot')
+ cmd_interp = os.path.join(val, 'System32\\cmd.exe')
+ except SCons.Util.RegError:
+ try:
+ # Okay, try the Windows 9x system root
+ k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
+ 'Software\\Microsoft\\Windows\\CurrentVersion')
+ val, tok = SCons.Util.RegQueryValueEx(k, 'SystemRoot')
+ cmd_interp = os.path.join(val, 'command.com')
+ except KeyboardInterrupt:
+ raise
+ except:
+ pass
+
+ # For the special case of not having access to the registry, we
+ # use a temporary path and pathext to attempt to find the command
+ # interpreter. If we fail, we try to find the interpreter through
+ # the env's PATH. The problem with that is that the env might not
+ # yet contain an ENV or a PATH at this point.
+ if not cmd_interp:
+ systemroot = r'C:\Windows'
+ if os.environ.has_key('SYSTEMROOT'):
+ systemroot = os.environ['SYSTEMROOT']
+ tmp_path = systemroot + os.pathsep + \
+ os.path.join(systemroot,'System32')
+ tmp_pathext = '.com;.exe;.bat;.cmd'
+ if os.environ.has_key('PATHEXT'):
+ tmp_pathext = os.environ['PATHEXT']
+ cmd_interp = SCons.Util.WhereIs('cmd', tmp_path, tmp_pathext)
+ if not cmd_interp:
+ cmd_interp = SCons.Util.WhereIs('command', tmp_path, tmp_pathext)
+
+ if not cmd_interp:
+ cmd_interp = env.Detect('cmd')
+ if not cmd_interp:
+ cmd_interp = env.Detect('command')
+
+
+ if not env.has_key('ENV'):
+ env['ENV'] = {}
+
+ # Import things from the external environment to the construction
+ # environment's ENV. This is a potential slippery slope, because we
+ # *don't* want to make builds dependent on the user's environment by
+ # default. We're doing this for SYSTEMROOT, though, because it's
+ # needed for anything that uses sockets, and seldom changes, and
+ # for SYSTEMDRIVE because it's related.
+ #
+ # Weigh the impact carefully before adding other variables to this list.
+ import_env = [ 'SYSTEMDRIVE', 'SYSTEMROOT', 'TEMP', 'TMP' ]
+ for var in import_env:
+ v = os.environ.get(var)
+ if v:
+ env['ENV'][var] = v
+
+ env['ENV']['PATHEXT'] = '.COM;.EXE;.BAT;.CMD'
+ env['OBJPREFIX'] = ''
+ env['OBJSUFFIX'] = '.obj'
+ env['SHOBJPREFIX'] = '$OBJPREFIX'
+ env['SHOBJSUFFIX'] = '$OBJSUFFIX'
+ env['PROGPREFIX'] = ''
+ env['PROGSUFFIX'] = '.exe'
+ env['LIBPREFIX'] = ''
+ env['LIBSUFFIX'] = '.lib'
+ env['SHLIBPREFIX'] = ''
+ env['SHLIBSUFFIX'] = '.dll'
+ env['LIBPREFIXES'] = [ '$LIBPREFIX' ]
+ env['LIBSUFFIXES'] = [ '$LIBSUFFIX' ]
+ env['PSPAWN'] = piped_spawn
+ env['SPAWN'] = spawn
+ env['SHELL'] = cmd_interp
+ env['TEMPFILE'] = TempFileMunge
+ env['TEMPFILEPREFIX'] = '@'
+ env['MAXLINELENGTH'] = 2048
+ env['ESCAPE'] = escape
diff --git a/tools/scons/scons-local-1.2.0/SCons/SConf.py b/tools/scons/scons-local-1.2.0/SCons/SConf.py
new file mode 100644
index 000000000..ec80fe97a
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/SConf.py
@@ -0,0 +1,1012 @@
+"""SCons.SConf
+
+Autoconf-like configuration support.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/SConf.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import re
+import string
+import StringIO
+import sys
+import traceback
+import types
+
+import SCons.Action
+import SCons.Builder
+import SCons.Errors
+import SCons.Job
+import SCons.Node.FS
+import SCons.Taskmaster
+import SCons.Util
+import SCons.Warnings
+import SCons.Conftest
+
+from SCons.Debug import Trace
+
+# Turn off the Conftest error logging
+SCons.Conftest.LogInputFiles = 0
+SCons.Conftest.LogErrorMessages = 0
+
+# The type of the current build ('clean' or 'help', or None for a normal
+# build); set from SCons.Script via SetBuildType() below.
+build_type = None
+build_types = ['clean', 'help']
+
+def SetBuildType(type):
+ global build_type
+ build_type = type
+
+# to be set, if we are in dry-run mode
+dryrun = 0
+
+AUTO=0 # use SCons dependency scanning for up-to-date checks
+FORCE=1 # force all tests to be rebuilt
+CACHE=2 # force all tests to be taken from cache (raise an error, if necessary)
+cache_mode = AUTO
+
+def SetCacheMode(mode):
+ """Set the Configure cache mode. mode must be one of "auto", "force",
+ or "cache"."""
+ global cache_mode
+ if mode == "auto":
+ cache_mode = AUTO
+ elif mode == "force":
+ cache_mode = FORCE
+ elif mode == "cache":
+ cache_mode = CACHE
+ else:
+ raise ValueError, "SCons.SConf.SetCacheMode: Unknown mode " + mode
+
+progress_display = SCons.Util.display # will be overwritten by SCons.Script
+def SetProgressDisplay(display):
+ """Set the progress display to use (called from SCons.Script)"""
+ global progress_display
+ progress_display = display
+
+SConfFS = None
+
+_ac_build_counter = 0 # incremented, whenever TryBuild is called
+_ac_config_logs = {} # all config.log files created in this build
+_ac_config_hs = {} # all config.h files created in this build
+sconf_global = None # current sconf object
+
+def _createConfigH(target, source, env):
+ t = open(str(target[0]), "w")
+ defname = re.sub('[^A-Za-z0-9_]', '_', string.upper(str(target[0])))
+ t.write("""#ifndef %(DEFNAME)s_SEEN
+#define %(DEFNAME)s_SEEN
+
+""" % {'DEFNAME' : defname})
+ t.write(source[0].get_contents())
+ t.write("""
+#endif /* %(DEFNAME)s_SEEN */
+""" % {'DEFNAME' : defname})
+ t.close()
+
+def _stringConfigH(target, source, env):
+ return "scons: Configure: creating " + str(target[0])
+
+def CreateConfigHBuilder(env):
+ """Called just before the building targets phase begins."""
+ if len(_ac_config_hs) == 0:
+ return
+ action = SCons.Action.Action(_createConfigH,
+ _stringConfigH)
+ sconfigHBld = SCons.Builder.Builder(action=action)
+ env.Append( BUILDERS={'SConfigHBuilder':sconfigHBld} )
+ for k in _ac_config_hs.keys():
+ env.SConfigHBuilder(k, env.Value(_ac_config_hs[k]))
+
+class SConfWarning(SCons.Warnings.Warning):
+ pass
+SCons.Warnings.enableWarningClass(SConfWarning)
+
+# some error definitions
+class SConfError(SCons.Errors.UserError):
+ def __init__(self,msg):
+ SCons.Errors.UserError.__init__(self,msg)
+
+class ConfigureDryRunError(SConfError):
+ """Raised when a file or directory needs to be updated during a Configure
+ process, but the user requested a dry-run"""
+ def __init__(self,target):
+ if not isinstance(target, SCons.Node.FS.File):
+ msg = 'Cannot create configure directory "%s" within a dry-run.' % str(target)
+ else:
+ msg = 'Cannot update configure test "%s" within a dry-run.' % str(target)
+ SConfError.__init__(self,msg)
+
+class ConfigureCacheError(SConfError):
+    """Raised when a user explicitly requested the cache feature, but the test
+    has never been built before."""
+ def __init__(self,target):
+ SConfError.__init__(self, '"%s" is not yet built and cache is forced.' % str(target))
+
+# define actions for building text files
+def _createSource( target, source, env ):
+ fd = open(str(target[0]), "w")
+ fd.write(source[0].get_contents())
+ fd.close()
+def _stringSource( target, source, env ):
+ return (str(target[0]) + ' <-\n |' +
+ string.replace( source[0].get_contents(),
+ '\n', "\n |" ) )
+
+# python 2.2 introduces types.BooleanType
+BooleanTypes = [types.IntType]
+if hasattr(types, 'BooleanType'): BooleanTypes.append(types.BooleanType)
+
+class SConfBuildInfo(SCons.Node.FS.FileBuildInfo):
+ """
+ Special build info for targets of configure tests. Additional members
+ are result (did the builder succeed last time?) and string, which
+ contains messages of the original build phase.
+ """
+ result = None # -> 0/None -> no error, != 0 error
+ string = None # the stdout / stderr output when building the target
+
+ def set_build_result(self, result, string):
+ self.result = result
+ self.string = string
+
+
+class Streamer:
+ """
+ 'Sniffer' for a file-like writable object. Similar to the unix tool tee.
+ """
+ def __init__(self, orig):
+ self.orig = orig
+ self.s = StringIO.StringIO()
+
+ def write(self, str):
+ if self.orig:
+ self.orig.write(str)
+ self.s.write(str)
+
+ def writelines(self, lines):
+ for l in lines:
+ self.write(l + '\n')
+
+ def getvalue(self):
+ """
+ Return everything written to orig since the Streamer was created.
+ """
+ return self.s.getvalue()
+
+ def flush(self):
+ if self.orig:
+ self.orig.flush()
+ self.s.flush()
+
+
+class SConfBuildTask(SCons.Taskmaster.Task):
+ """
+ This is almost the same as SCons.Script.BuildTask. Handles SConfErrors
+ correctly and knows about the current cache_mode.
+ """
+ def display(self, message):
+ if sconf_global.logstream:
+ sconf_global.logstream.write("scons: Configure: " + message + "\n")
+
+ def display_cached_string(self, bi):
+ """
+ Logs the original builder messages, given the SConfBuildInfo instance
+ bi.
+ """
+ if not isinstance(bi, SConfBuildInfo):
+ SCons.Warnings.warn(SConfWarning,
+ "The stored build information has an unexpected class: %s" % bi.__class__)
+ else:
+ self.display("The original builder output was:\n" +
+ string.replace(" |" + str(bi.string),
+ "\n", "\n |"))
+
+ def failed(self):
+        # Check whether the reason was a ConfigureDryRunError or a
+        # ConfigureCacheError; if so, re-raise the exception.
+ exc_type = self.exc_info()[0]
+ if issubclass(exc_type, SConfError):
+ raise
+ elif issubclass(exc_type, SCons.Errors.BuildError):
+            # We ignore build errors (they occur when a test doesn't pass).
+            # Clear the exception to prevent the contained traceback
+            # from creating a reference cycle.
+ self.exc_clear()
+ else:
+ self.display('Caught exception while building "%s":\n' %
+ self.targets[0])
+ try:
+ excepthook = sys.excepthook
+ except AttributeError:
+ # Earlier versions of Python don't have sys.excepthook...
+ def excepthook(type, value, tb):
+ traceback.print_tb(tb)
+ print type, value
+ apply(excepthook, self.exc_info())
+ return SCons.Taskmaster.Task.failed(self)
+
+ def collect_node_states(self):
+        # Returns (is_up_to_date, cached_error, cachable) where:
+        #   is_up_to_date is 1 if the node(s) are up to date,
+        #   cached_error is 1 if the node(s) are up to date but the
+        #       build will fail, and
+        #   cachable is 0 if some nodes are not in our cache.
+ T = 0
+ changed = False
+ cached_error = False
+ cachable = True
+ for t in self.targets:
+ if T: Trace('%s' % (t))
+ bi = t.get_stored_info().binfo
+ if isinstance(bi, SConfBuildInfo):
+ if T: Trace(': SConfBuildInfo')
+ if cache_mode == CACHE:
+ t.set_state(SCons.Node.up_to_date)
+ if T: Trace(': set_state(up_to-date)')
+ else:
+ if T: Trace(': get_state() %s' % t.get_state())
+ if T: Trace(': changed() %s' % t.changed())
+ if (t.get_state() != SCons.Node.up_to_date and t.changed()):
+ changed = True
+ if T: Trace(': changed %s' % changed)
+ cached_error = cached_error or bi.result
+ else:
+ if T: Trace(': else')
+ # the node hasn't been built in a SConf context or doesn't
+ # exist
+ cachable = False
+ changed = ( t.get_state() != SCons.Node.up_to_date )
+ if T: Trace(': changed %s' % changed)
+ if T: Trace('\n')
+ return (not changed, cached_error, cachable)
+
+ def execute(self):
+ if not self.targets[0].has_builder():
+ return
+
+ sconf = sconf_global
+
+ is_up_to_date, cached_error, cachable = self.collect_node_states()
+
+ if cache_mode == CACHE and not cachable:
+ raise ConfigureCacheError(self.targets[0])
+ elif cache_mode == FORCE:
+ is_up_to_date = 0
+
+ if cached_error and is_up_to_date:
+ self.display("Building \"%s\" failed in a previous run and all "
+ "its sources are up to date." % str(self.targets[0]))
+ binfo = self.targets[0].get_stored_info().binfo
+ self.display_cached_string(binfo)
+ raise SCons.Errors.BuildError # will be 'caught' in self.failed
+ elif is_up_to_date:
+ self.display("\"%s\" is up to date." % str(self.targets[0]))
+ binfo = self.targets[0].get_stored_info().binfo
+ self.display_cached_string(binfo)
+ elif dryrun:
+ raise ConfigureDryRunError(self.targets[0])
+ else:
+ # note stdout and stderr are the same here
+ s = sys.stdout = sys.stderr = Streamer(sys.stdout)
+ try:
+ env = self.targets[0].get_build_env()
+ env['PSTDOUT'] = env['PSTDERR'] = s
+ try:
+ sconf.cached = 0
+ self.targets[0].build()
+ finally:
+ sys.stdout = sys.stderr = env['PSTDOUT'] = \
+ env['PSTDERR'] = sconf.logstream
+ except KeyboardInterrupt:
+ raise
+ except SystemExit:
+ exc_value = sys.exc_info()[1]
+ raise SCons.Errors.ExplicitExit(self.targets[0],exc_value.code)
+ except Exception, e:
+ for t in self.targets:
+ binfo = t.get_binfo()
+ binfo.__class__ = SConfBuildInfo
+ binfo.set_build_result(1, s.getvalue())
+ sconsign_entry = SCons.SConsign.SConsignEntry()
+ sconsign_entry.binfo = binfo
+ #sconsign_entry.ninfo = self.get_ninfo()
+ # We'd like to do this as follows:
+ # t.store_info(binfo)
+ # However, we need to store it as an SConfBuildInfo
+ # object, and store_info() will turn it into a
+ # regular FileNodeInfo if the target is itself a
+ # regular File.
+ sconsign = t.dir.sconsign()
+ sconsign.set_entry(t.name, sconsign_entry)
+ sconsign.merge()
+ raise e
+ else:
+ for t in self.targets:
+ binfo = t.get_binfo()
+ binfo.__class__ = SConfBuildInfo
+ binfo.set_build_result(0, s.getvalue())
+ sconsign_entry = SCons.SConsign.SConsignEntry()
+ sconsign_entry.binfo = binfo
+ #sconsign_entry.ninfo = self.get_ninfo()
+ # We'd like to do this as follows:
+ # t.store_info(binfo)
+ # However, we need to store it as an SConfBuildInfo
+ # object, and store_info() will turn it into a
+ # regular FileNodeInfo if the target is itself a
+ # regular File.
+ sconsign = t.dir.sconsign()
+ sconsign.set_entry(t.name, sconsign_entry)
+ sconsign.merge()
+
+class SConfBase:
+    """This is simply a class to represent a configure context. After
+    creating a SConf object, you can call any of its tests. When you are
+    finished with your tests, be sure to call the Finish() method, which
+    returns the modified environment.
+    Some words about caching: in most cases it is not necessary to cache
+    test results explicitly. Instead, we use the scons dependency checking
+    mechanism. For example, if one wants to compile a test program
+    (SConf.TryLink), the compiler is only called if the program's
+    dependencies have changed. However, if the program could not be
+    compiled in a former SConf run, we need to cache this error explicitly.
+    """
+
+ def __init__(self, env, custom_tests = {}, conf_dir='$CONFIGUREDIR',
+ log_file='$CONFIGURELOG', config_h = None, _depth = 0):
+        """Constructor. Pass additional tests in the custom_tests dictionary,
+        e.g. custom_tests={'CheckPrivate':MyPrivateTest}, where MyPrivateTest
+        defines a custom test.
+        Note also the conf_dir and log_file arguments (you may want to
+        build tests in the VariantDir, not in the SourceDir).
+        """
+ global SConfFS
+ if not SConfFS:
+ SConfFS = SCons.Node.FS.default_fs or \
+ SCons.Node.FS.FS(env.fs.pathTop)
+ if not sconf_global is None:
+            raise SCons.Errors.UserError(
+                "Only one SConf object may be active at one time")
+ self.env = env
+ if log_file != None:
+ log_file = SConfFS.File(env.subst(log_file))
+ self.logfile = log_file
+ self.logstream = None
+ self.lastTarget = None
+ self.depth = _depth
+ self.cached = 0 # will be set, if all test results are cached
+
+ # add default tests
+ default_tests = {
+ 'CheckCC' : CheckCC,
+ 'CheckCXX' : CheckCXX,
+ 'CheckSHCC' : CheckSHCC,
+ 'CheckSHCXX' : CheckSHCXX,
+ 'CheckFunc' : CheckFunc,
+ 'CheckType' : CheckType,
+ 'CheckTypeSize' : CheckTypeSize,
+ 'CheckDeclaration' : CheckDeclaration,
+ 'CheckHeader' : CheckHeader,
+ 'CheckCHeader' : CheckCHeader,
+ 'CheckCXXHeader' : CheckCXXHeader,
+ 'CheckLib' : CheckLib,
+ 'CheckLibWithHeader' : CheckLibWithHeader,
+ }
+ self.AddTests(default_tests)
+ self.AddTests(custom_tests)
+ self.confdir = SConfFS.Dir(env.subst(conf_dir))
+ if not config_h is None:
+ config_h = SConfFS.File(config_h)
+ self.config_h = config_h
+ self._startup()
+
+ def Finish(self):
+ """Call this method after finished with your tests:
+ env = sconf.Finish()
+ """
+ self._shutdown()
+ return self.env
+
+ def Define(self, name, value = None, comment = None):
+ """
+        Define a preprocessor symbol name, with the optionally given value, in
+        the current config header.
+
+        If value is None (the default), then "#define name" is written. If
+        value is not None, then "#define name value" is written.
+
+        comment is a string which will be put as a C comment in the
+        header, to explain the meaning of the value (the appropriate C comment
+        markers /* and */ are added automatically)."""
+ lines = []
+ if comment:
+ comment_str = "/* %s */" % comment
+ lines.append(comment_str)
+
+ if value is not None:
+ define_str = "#define %s %s" % (name, value)
+ else:
+ define_str = "#define %s" % name
+ lines.append(define_str)
+ lines.append('')
+
+ self.config_h_text = self.config_h_text + string.join(lines, '\n')
+
+ def BuildNodes(self, nodes):
+ """
+ Tries to build the given nodes immediately. Returns 1 on success,
+ 0 on error.
+ """
+ if self.logstream != None:
+ # override stdout / stderr to write in log file
+ oldStdout = sys.stdout
+ sys.stdout = self.logstream
+ oldStderr = sys.stderr
+ sys.stderr = self.logstream
+
+ # the engine assumes the current path is the SConstruct directory ...
+ old_fs_dir = SConfFS.getcwd()
+ old_os_dir = os.getcwd()
+ SConfFS.chdir(SConfFS.Top, change_os_dir=1)
+
+ # Because we take responsibility here for writing out our
+ # own .sconsign info (see SConfBuildTask.execute(), above),
+ # we override the store_info() method with a null place-holder
+ # so we really control how it gets written.
+ for n in nodes:
+ n.store_info = n.do_not_store_info
+
+ ret = 1
+
+ try:
+ # ToDo: use user options for calc
+ save_max_drift = SConfFS.get_max_drift()
+ SConfFS.set_max_drift(0)
+ tm = SCons.Taskmaster.Taskmaster(nodes, SConfBuildTask)
+ # we don't want to build tests in parallel
+ jobs = SCons.Job.Jobs(1, tm )
+ jobs.run()
+ for n in nodes:
+ state = n.get_state()
+ if (state != SCons.Node.executed and
+ state != SCons.Node.up_to_date):
+ # the node could not be built. we return 0 in this case
+ ret = 0
+ finally:
+ SConfFS.set_max_drift(save_max_drift)
+ os.chdir(old_os_dir)
+ SConfFS.chdir(old_fs_dir, change_os_dir=0)
+ if self.logstream != None:
+ # restore stdout / stderr
+ sys.stdout = oldStdout
+ sys.stderr = oldStderr
+ return ret
+
+ def pspawn_wrapper(self, sh, escape, cmd, args, env):
+ """Wrapper function for handling piped spawns.
+
+ This looks to the calling interface (in Action.py) like a "normal"
+ spawn, but associates the call with the PSPAWN variable from
+ the construction environment and with the streams to which we
+ want the output logged. This gets slid into the construction
+ environment as the SPAWN variable so Action.py doesn't have to
+ know or care whether it's spawning a piped command or not.
+ """
+ return self.pspawn(sh, escape, cmd, args, env, self.logstream, self.logstream)
+
+
+ def TryBuild(self, builder, text = None, extension = ""):
+        """Low-level TryBuild implementation. Normally you don't need to
+        call this directly - use TryCompile / TryLink / TryRun instead.
+ """
+ global _ac_build_counter
+
+ # Make sure we have a PSPAWN value, and save the current
+ # SPAWN value.
+ try:
+ self.pspawn = self.env['PSPAWN']
+ except KeyError:
+ raise SCons.Errors.UserError('Missing PSPAWN construction variable.')
+ try:
+ save_spawn = self.env['SPAWN']
+ except KeyError:
+ raise SCons.Errors.UserError('Missing SPAWN construction variable.')
+
+ nodesToBeBuilt = []
+
+ f = "conftest_" + str(_ac_build_counter)
+ pref = self.env.subst( builder.builder.prefix )
+ suff = self.env.subst( builder.builder.suffix )
+ target = self.confdir.File(pref + f + suff)
+
+ try:
+ # Slide our wrapper into the construction environment as
+ # the SPAWN function.
+ self.env['SPAWN'] = self.pspawn_wrapper
+ sourcetext = self.env.Value(text)
+
+ if text != None:
+ textFile = self.confdir.File(f + extension)
+ textFileNode = self.env.SConfSourceBuilder(target=textFile,
+ source=sourcetext)
+ nodesToBeBuilt.extend(textFileNode)
+ source = textFileNode
+ else:
+ source = None
+
+ nodes = builder(target = target, source = source)
+ if not SCons.Util.is_List(nodes):
+ nodes = [nodes]
+ nodesToBeBuilt.extend(nodes)
+ result = self.BuildNodes(nodesToBeBuilt)
+
+ finally:
+ self.env['SPAWN'] = save_spawn
+
+ _ac_build_counter = _ac_build_counter + 1
+ if result:
+ self.lastTarget = nodes[0]
+ else:
+ self.lastTarget = None
+
+ return result
+
+ def TryAction(self, action, text = None, extension = ""):
+ """Tries to execute the given action with optional source file
+        contents <text> and optional source file extension <extension>.
+ Returns the status (0 : failed, 1 : ok) and the contents of the
+ output file.
+ """
+ builder = SCons.Builder.Builder(action=action)
+ self.env.Append( BUILDERS = {'SConfActionBuilder' : builder} )
+ ok = self.TryBuild(self.env.SConfActionBuilder, text, extension)
+ del self.env['BUILDERS']['SConfActionBuilder']
+ if ok:
+ outputStr = self.lastTarget.get_contents()
+ return (1, outputStr)
+ return (0, "")
+
+ def TryCompile( self, text, extension):
+ """Compiles the program given in text to an env.Object, using extension
+        as file extension (e.g. '.c'). Returns 1 if compilation was
+ successful, 0 otherwise. The target is saved in self.lastTarget (for
+ further processing).
+ """
+ return self.TryBuild(self.env.Object, text, extension)
+
+ def TryLink( self, text, extension ):
+ """Compiles the program given in text to an executable env.Program,
+        using extension as file extension (e.g. '.c'). Returns 1 if
+ compilation was successful, 0 otherwise. The target is saved in
+ self.lastTarget (for further processing).
+ """
+ return self.TryBuild(self.env.Program, text, extension )
+
+ def TryRun(self, text, extension ):
+ """Compiles and runs the program given in text, using extension
+ as file extension (e.g. '.c'). Returns (1, outputStr) on success,
+ (0, '') otherwise. The target (a file containing the program's stdout)
+ is saved in self.lastTarget (for further processing).
+ """
+ ok = self.TryLink(text, extension)
+ if( ok ):
+ prog = self.lastTarget
+ pname = str(prog)
+ output = SConfFS.File(pname+'.out')
+ node = self.env.Command(output, prog, [ [ pname, ">", "${TARGET}"] ])
+ ok = self.BuildNodes(node)
+ if ok:
+ outputStr = output.get_contents()
+ return( 1, outputStr)
+ return (0, "")
+
+ class TestWrapper:
+ """A wrapper around Tests (to ensure sanity)"""
+ def __init__(self, test, sconf):
+ self.test = test
+ self.sconf = sconf
+ def __call__(self, *args, **kw):
+ if not self.sconf.active:
+                raise SCons.Errors.UserError(
+                    "Test called after sconf.Finish()")
+ context = CheckContext(self.sconf)
+ ret = apply(self.test, (context,) + args, kw)
+ if not self.sconf.config_h is None:
+ self.sconf.config_h_text = self.sconf.config_h_text + context.config_h
+ context.Result("error: no result")
+ return ret
+
+ def AddTest(self, test_name, test_instance):
+        """Adds the test given as test_instance to this SConf instance. It
+        can then be called as self.test_name(...)."""
+ setattr(self, test_name, SConfBase.TestWrapper(test_instance, self))
+
+ def AddTests(self, tests):
+ """Adds all the tests given in the tests dictionary to this SConf
+ instance
+ """
+ for name in tests.keys():
+ self.AddTest(name, tests[name])
+
+ def _createDir( self, node ):
+ dirName = str(node)
+ if dryrun:
+ if not os.path.isdir( dirName ):
+ raise ConfigureDryRunError(dirName)
+ else:
+ if not os.path.isdir( dirName ):
+ os.makedirs( dirName )
+ node._exists = 1
+
+ def _startup(self):
+ """Private method. Set up logstream, and set the environment
+ variables necessary for a piped build
+ """
+ global _ac_config_logs
+ global sconf_global
+ global SConfFS
+
+ self.lastEnvFs = self.env.fs
+ self.env.fs = SConfFS
+ self._createDir(self.confdir)
+ self.confdir.up().add_ignore( [self.confdir] )
+
+ if self.logfile != None and not dryrun:
+ # truncate logfile, if SConf.Configure is called for the first time
+ # in a build
+ if _ac_config_logs.has_key(self.logfile):
+ log_mode = "a"
+ else:
+ _ac_config_logs[self.logfile] = None
+ log_mode = "w"
+ fp = open(str(self.logfile), log_mode)
+ self.logstream = SCons.Util.Unbuffered(fp)
+            # The logfile may live in a build directory, so we tell
+            # the build system not to override it with a file of the
+            # same name that may exist in the source directory.
+ self.logfile.dir.add_ignore( [self.logfile] )
+
+ tb = traceback.extract_stack()[-3-self.depth]
+ old_fs_dir = SConfFS.getcwd()
+ SConfFS.chdir(SConfFS.Top, change_os_dir=0)
+ self.logstream.write('file %s,line %d:\n\tConfigure(confdir = %s)\n' %
+ (tb[0], tb[1], str(self.confdir)) )
+ SConfFS.chdir(old_fs_dir)
+ else:
+ self.logstream = None
+ # we use a special builder to create source files from TEXT
+ action = SCons.Action.Action(_createSource,
+ _stringSource)
+ sconfSrcBld = SCons.Builder.Builder(action=action)
+ self.env.Append( BUILDERS={'SConfSourceBuilder':sconfSrcBld} )
+ self.config_h_text = _ac_config_hs.get(self.config_h, "")
+ self.active = 1
+ # only one SConf instance should be active at a time ...
+ sconf_global = self
+
+ def _shutdown(self):
+ """Private method. Reset to non-piped spawn"""
+ global sconf_global, _ac_config_hs
+
+ if not self.active:
+ raise SCons.Errors.UserError, "Finish may be called only once!"
+ if self.logstream != None and not dryrun:
+ self.logstream.write("\n")
+ self.logstream.close()
+ self.logstream = None
+ # remove the SConfSourceBuilder from the environment
+ blds = self.env['BUILDERS']
+ del blds['SConfSourceBuilder']
+ self.env.Replace( BUILDERS=blds )
+ self.active = 0
+ sconf_global = None
+ if not self.config_h is None:
+ _ac_config_hs[self.config_h] = self.config_h_text
+ self.env.fs = self.lastEnvFs
+
+class CheckContext:
+ """Provides a context for configure tests. Defines how a test writes to the
+ screen and log file.
+
+ A typical test is just a callable with an instance of CheckContext as
+ first argument:
+
+    def CheckCustom(context, ...):
+        context.Message('Checking my weird test ... ')
+        ret = myWeirdTestFunction(...)
+        context.Result(ret)
+        return ret
+
+    Often, myWeirdTestFunction will be one of
+    context.TryCompile/context.TryLink/context.TryRun; their results are
+    cached, so the underlying test programs are only rebuilt if their
+    dependencies have changed. A fuller custom-test sketch appears after
+    the end of this class.
+    """
+
+ def __init__(self, sconf):
+ """Constructor. Pass the corresponding SConf instance."""
+ self.sconf = sconf
+ self.did_show_result = 0
+
+ # for Conftest.py:
+ self.vardict = {}
+ self.havedict = {}
+ self.headerfilename = None
+ self.config_h = "" # config_h text will be stored here
+        # We don't regenerate the config.h file after each test. That means
+        # that tests won't be able to include the config.h file, and so
+        # they can't do an #ifdef HAVE_XXX_H. This shouldn't be a major
+        # issue, though. If it turns out that we need to include config.h
+        # in tests, we must ensure that the dependencies are worked out
+        # correctly. Note that we can't use Conftest.py's support for
+        # config.h, because we would need to specify a builder for the
+        # config.h file ...
+
+ def Message(self, text):
+ """Inform about what we are doing right now, e.g.
+ 'Checking for SOMETHING ... '
+ """
+ self.Display(text)
+ self.sconf.cached = 1
+ self.did_show_result = 0
+
+ def Result(self, res):
+ """Inform about the result of the test. res may be an integer or a
+        string. In case of an integer, the text written will be 'yes' or
+        'no' (for a true or false value, respectively).
+ The result is only displayed when self.did_show_result is not set.
+ """
+ if type(res) in BooleanTypes:
+ if res:
+ text = "yes"
+ else:
+ text = "no"
+ elif type(res) == types.StringType:
+ text = res
+ else:
+ raise TypeError, "Expected string, int or bool, got " + str(type(res))
+
+ if self.did_show_result == 0:
+ # Didn't show result yet, do it now.
+ self.Display(text + "\n")
+ self.did_show_result = 1
+
+ def TryBuild(self, *args, **kw):
+ return apply(self.sconf.TryBuild, args, kw)
+
+ def TryAction(self, *args, **kw):
+ return apply(self.sconf.TryAction, args, kw)
+
+ def TryCompile(self, *args, **kw):
+ return apply(self.sconf.TryCompile, args, kw)
+
+ def TryLink(self, *args, **kw):
+ return apply(self.sconf.TryLink, args, kw)
+
+ def TryRun(self, *args, **kw):
+ return apply(self.sconf.TryRun, args, kw)
+
+ def __getattr__( self, attr ):
+ if( attr == 'env' ):
+ return self.sconf.env
+ elif( attr == 'lastTarget' ):
+ return self.sconf.lastTarget
+ else:
+ raise AttributeError, "CheckContext instance has no attribute '%s'" % attr
+
+ #### Stuff used by Conftest.py (look there for explanations).
+
+ def BuildProg(self, text, ext):
+ self.sconf.cached = 1
+ # TODO: should use self.vardict for $CC, $CPPFLAGS, etc.
+ return not self.TryBuild(self.env.Program, text, ext)
+
+ def CompileProg(self, text, ext):
+ self.sconf.cached = 1
+ # TODO: should use self.vardict for $CC, $CPPFLAGS, etc.
+ return not self.TryBuild(self.env.Object, text, ext)
+
+ def CompileSharedObject(self, text, ext):
+ self.sconf.cached = 1
+ # TODO: should use self.vardict for $SHCC, $CPPFLAGS, etc.
+ return not self.TryBuild(self.env.SharedObject, text, ext)
+
+ def RunProg(self, text, ext):
+ self.sconf.cached = 1
+ # TODO: should use self.vardict for $CC, $CPPFLAGS, etc.
+ st, out = self.TryRun(text, ext)
+ return not st, out
+
+ def AppendLIBS(self, lib_name_list):
+ oldLIBS = self.env.get( 'LIBS', [] )
+ self.env.Append(LIBS = lib_name_list)
+ return oldLIBS
+
+ def SetLIBS(self, val):
+ oldLIBS = self.env.get( 'LIBS', [] )
+ self.env.Replace(LIBS = val)
+ return oldLIBS
+
+ def Display(self, msg):
+ if self.sconf.cached:
+            # We assume that Display is called twice for each test here:
+            # once for the "Checking for ..." message and once for the result.
+            # The self.sconf.cached flag can only be set between those calls.
+ msg = "(cached) " + msg
+ self.sconf.cached = 0
+ progress_display(msg, append_newline=0)
+ self.Log("scons: Configure: " + msg + "\n")
+
+ def Log(self, msg):
+ if self.sconf.logstream != None:
+ self.sconf.logstream.write(msg)
+
+ #### End of stuff used by Conftest.py.
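To make the custom-test protocol sketched in the CheckContext docstring
concrete, here is roughly what a user-supplied check looks like when it is
passed to Configure() through custom_tests. The test name, the program text
and the SConstruct wiring below are illustrative only; the
Message/TryCompile/Result calls are the CheckContext API defined above.

    def CheckLongLong(context):
        context.Message('Checking for long long ... ')
        # TryCompile(text, extension) returns 1 on success, 0 on failure.
        src = "int main(void) { long long x = 0; return (int)x; }\n"
        result = context.TryCompile(src, '.c')
        context.Result(result)
        return result

    # Typical SConstruct wiring (hypothetical):
    #   conf = Configure(env, custom_tests={'CheckLongLong': CheckLongLong})
    #   have_long_long = conf.CheckLongLong()
    #   env = conf.Finish()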
+
+
+def SConf(*args, **kw):
+ if kw.get(build_type, True):
+ kw['_depth'] = kw.get('_depth', 0) + 1
+ for bt in build_types:
+ try:
+ del kw[bt]
+ except KeyError:
+ pass
+ return apply(SConfBase, args, kw)
+ else:
+ return SCons.Util.Null()
+
+
+def CheckFunc(context, function_name, header = None, language = None):
+ res = SCons.Conftest.CheckFunc(context, function_name, header = header, language = language)
+ context.did_show_result = 1
+ return not res
+
+def CheckType(context, type_name, includes = "", language = None):
+ res = SCons.Conftest.CheckType(context, type_name,
+ header = includes, language = language)
+ context.did_show_result = 1
+ return not res
+
+def CheckTypeSize(context, type_name, includes = "", language = None, expect = None):
+ res = SCons.Conftest.CheckTypeSize(context, type_name,
+ header = includes, language = language,
+ expect = expect)
+ context.did_show_result = 1
+ return res
+
+def CheckDeclaration(context, declaration, includes = "", language = None):
+ res = SCons.Conftest.CheckDeclaration(context, declaration,
+ includes = includes,
+ language = language)
+ context.did_show_result = 1
+ return not res
+
+def createIncludesFromHeaders(headers, leaveLast, include_quotes = '""'):
+    # Used by CheckHeader and CheckLibWithHeader to produce C #include
+    # statements from the specified header (or list of headers).
+ if not SCons.Util.is_List(headers):
+ headers = [headers]
+ l = []
+ if leaveLast:
+ lastHeader = headers[-1]
+ headers = headers[:-1]
+ else:
+ lastHeader = None
+ for s in headers:
+ l.append("#include %s%s%s\n"
+ % (include_quotes[0], s, include_quotes[1]))
+ return string.join(l, ''), lastHeader
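For illustration, the helper above behaves roughly as follows (the header
names are arbitrary):

    # leaveLast=1: the last header is held back for the actual check.
    prefix, last = createIncludesFromHeaders(['stdio.h', 'stdlib.h'], 1, '<>')
    # prefix == '#include <stdio.h>\n'
    # last   == 'stdlib.h'

    # A single header with the default quotes and leaveLast=0.
    prefix, last = createIncludesFromHeaders('stdio.h', 0)
    # prefix == '#include "stdio.h"\n'
    # last   == None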
+
+def CheckHeader(context, header, include_quotes = '<>', language = None):
+ """
+ A test for a C or C++ header file.
+ """
+ prog_prefix, hdr_to_check = \
+ createIncludesFromHeaders(header, 1, include_quotes)
+ res = SCons.Conftest.CheckHeader(context, hdr_to_check, prog_prefix,
+ language = language,
+ include_quotes = include_quotes)
+ context.did_show_result = 1
+ return not res
+
+def CheckCC(context):
+ res = SCons.Conftest.CheckCC(context)
+ return not res
+
+def CheckCXX(context):
+ res = SCons.Conftest.CheckCXX(context)
+ return not res
+
+def CheckSHCC(context):
+ res = SCons.Conftest.CheckSHCC(context)
+ return not res
+
+def CheckSHCXX(context):
+ res = SCons.Conftest.CheckSHCXX(context)
+ return not res
+
+# Bram: Make this function obsolete? CheckHeader() is more generic.
+
+def CheckCHeader(context, header, include_quotes = '""'):
+ """
+ A test for a C header file.
+ """
+ return CheckHeader(context, header, include_quotes, language = "C")
+
+
+# Bram: Make this function obsolete? CheckHeader() is more generic.
+
+def CheckCXXHeader(context, header, include_quotes = '""'):
+ """
+ A test for a C++ header file.
+ """
+ return CheckHeader(context, header, include_quotes, language = "C++")
+
+
+def CheckLib(context, library = None, symbol = "main",
+ header = None, language = None, autoadd = 1):
+ """
+ A test for a library. See also CheckLibWithHeader.
+ Note that library may also be None to test whether the given symbol
+ compiles without flags.
+ """
+
+ if library == []:
+ library = [None]
+
+ if not SCons.Util.is_List(library):
+ library = [library]
+
+ # ToDo: accept path for the library
+ res = SCons.Conftest.CheckLib(context, library, symbol, header = header,
+ language = language, autoadd = autoadd)
+ context.did_show_result = 1
+ return not res
+
+# XXX
+# Bram: Can only include one header and can't use #ifdef HAVE_HEADER_H.
+
+def CheckLibWithHeader(context, libs, header, language,
+ call = None, autoadd = 1):
+ # ToDo: accept path for library. Support system header files.
+ """
+ Another (more sophisticated) test for a library.
+    Checks whether the library and header are available for the given language
+    (which may be 'C' or 'CXX'). call may be a valid expression _with_ a
+    trailing ';'. As in CheckLib, we support libs=None, to test whether the
+    call compiles without extra link flags.
+ """
+ prog_prefix, dummy = \
+ createIncludesFromHeaders(header, 0)
+ if libs == []:
+ libs = [None]
+
+ if not SCons.Util.is_List(libs):
+ libs = [libs]
+
+ res = SCons.Conftest.CheckLib(context, libs, None, prog_prefix,
+ call = call, language = language, autoadd = autoadd)
+ context.did_show_result = 1
+ return not res
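Taken together, this module is what sits behind the Configure() calls in an
SConstruct; SCons.Script wraps SConfBase and the Check* functions above. A
minimal end-to-end sketch (the header, library and symbol names are
arbitrary examples, not anything this module requires):

    env = Environment()
    conf = Configure(env)
    if not conf.CheckCHeader('math.h'):
        print 'math.h is missing'
        Exit(1)
    if not conf.CheckLib('m', 'cos'):
        print 'libm not found, building without it'
    env = conf.Finish()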
diff --git a/tools/scons/scons-local-1.2.0/SCons/SConsign.py b/tools/scons/scons-local-1.2.0/SCons/SConsign.py
new file mode 100644
index 000000000..8e4c30c14
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/SConsign.py
@@ -0,0 +1,375 @@
+"""SCons.SConsign
+
+Writing and reading information to the .sconsign file or files.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/SConsign.py 3842 2008/12/20 22:59:52 scons"
+
+import cPickle
+import os
+import os.path
+
+import SCons.dblite
+import SCons.Warnings
+
+def corrupt_dblite_warning(filename):
+ SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning,
+ "Ignoring corrupt .sconsign file: %s"%filename)
+
+SCons.dblite.ignore_corrupt_dbfiles = 1
+SCons.dblite.corruption_warning = corrupt_dblite_warning
+
+#XXX Get rid of the global array so this becomes re-entrant.
+sig_files = []
+
+# Info for the database SConsign implementation (now the default):
+# "DataBase" is a dictionary that maps top-level SConstruct directories
+# to open database handles.
+# "DB_Module" is the Python database module to create the handles.
+# "DB_Name" is the base name of the database file (minus any
+# extension the underlying DB module will add).
+DataBase = {}
+DB_Module = SCons.dblite
+DB_Name = ".sconsign"
+DB_sync_list = []
+
+def Get_DataBase(dir):
+ global DataBase, DB_Module, DB_Name
+ top = dir.fs.Top
+ if not os.path.isabs(DB_Name) and top.repositories:
+ mode = "c"
+ for d in [top] + top.repositories:
+ if dir.is_under(d):
+ try:
+ return DataBase[d], mode
+ except KeyError:
+ path = d.entry_abspath(DB_Name)
+ try: db = DataBase[d] = DB_Module.open(path, mode)
+ except (IOError, OSError): pass
+ else:
+ if mode != "r":
+ DB_sync_list.append(db)
+ return db, mode
+ mode = "r"
+ try:
+ return DataBase[top], "c"
+ except KeyError:
+ db = DataBase[top] = DB_Module.open(DB_Name, "c")
+ DB_sync_list.append(db)
+ return db, "c"
+ except TypeError:
+ print "DataBase =", DataBase
+ raise
+
+def Reset():
+ """Reset global state. Used by unit tests that end up using
+ SConsign multiple times to get a clean slate for each test."""
+ global sig_files, DB_sync_list
+ sig_files = []
+ DB_sync_list = []
+
+normcase = os.path.normcase
+
+def write():
+ global sig_files
+ for sig_file in sig_files:
+ sig_file.write(sync=0)
+ for db in DB_sync_list:
+ try:
+ syncmethod = db.sync
+ except AttributeError:
+ pass # Not all anydbm modules have sync() methods.
+ else:
+ syncmethod()
+
+class SConsignEntry:
+ """
+ Wrapper class for the generic entry in a .sconsign file.
+ The Node subclass populates it with attributes as it pleases.
+
+ XXX As coded below, we do expect a '.binfo' attribute to be added,
+ but we'll probably generalize this in the next refactorings.
+ """
+ current_version_id = 1
+ def __init__(self):
+ # Create an object attribute from the class attribute so it ends up
+ # in the pickled data in the .sconsign file.
+        self._version_id = self.current_version_id
+ def convert_to_sconsign(self):
+ self.binfo.convert_to_sconsign()
+ def convert_from_sconsign(self, dir, name):
+ self.binfo.convert_from_sconsign(dir, name)
+
+class Base:
+ """
+ This is the controlling class for the signatures for the collection of
+ entries associated with a specific directory. The actual directory
+ association will be maintained by a subclass that is specific to
+ the underlying storage method. This class provides a common set of
+ methods for fetching and storing the individual bits of information
+    that make up a signature entry.
+ """
+ def __init__(self):
+ self.entries = {}
+ self.dirty = False
+ self.to_be_merged = {}
+
+ def get_entry(self, filename):
+ """
+ Fetch the specified entry attribute.
+ """
+ return self.entries[filename]
+
+ def set_entry(self, filename, obj):
+ """
+ Set the entry.
+ """
+ self.entries[filename] = obj
+ self.dirty = True
+
+ def do_not_set_entry(self, filename, obj):
+ pass
+
+ def store_info(self, filename, node):
+ entry = node.get_stored_info()
+ entry.binfo.merge(node.get_binfo())
+ self.to_be_merged[filename] = node
+ self.dirty = True
+
+ def do_not_store_info(self, filename, node):
+ pass
+
+ def merge(self):
+ for key, node in self.to_be_merged.items():
+ entry = node.get_stored_info()
+ try:
+ ninfo = entry.ninfo
+ except AttributeError:
+ # This happens with SConf Nodes, because the configuration
+ # subsystem takes direct control over how the build decision
+ # is made and its information stored.
+ pass
+ else:
+ ninfo.merge(node.get_ninfo())
+ self.entries[key] = entry
+ self.to_be_merged = {}
+
+class DB(Base):
+ """
+ A Base subclass that reads and writes signature information
+ from a global .sconsign.db* file--the actual file suffix is
+ determined by the database module.
+ """
+ def __init__(self, dir):
+ Base.__init__(self)
+
+ self.dir = dir
+
+ db, mode = Get_DataBase(dir)
+
+ # Read using the path relative to the top of the Repository
+ # (self.dir.tpath) from which we're fetching the signature
+ # information.
+ path = normcase(dir.tpath)
+ try:
+ rawentries = db[path]
+ except KeyError:
+ pass
+ else:
+ try:
+ self.entries = cPickle.loads(rawentries)
+ if type(self.entries) is not type({}):
+ self.entries = {}
+ raise TypeError
+ except KeyboardInterrupt:
+ raise
+ except Exception, e:
+ SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning,
+ "Ignoring corrupt sconsign entry : %s (%s)\n"%(self.dir.tpath, e))
+ for key, entry in self.entries.items():
+ entry.convert_from_sconsign(dir, key)
+
+ if mode == "r":
+            # This directory is actually under a repository, which most
+            # likely means the build is reaching in directly for a dependency
+            # on a file there. Don't actually set any entry info, so we
+            # won't try to write to that .sconsign.dblite file.
+ self.set_entry = self.do_not_set_entry
+ self.store_info = self.do_not_store_info
+
+ global sig_files
+ sig_files.append(self)
+
+ def write(self, sync=1):
+ if not self.dirty:
+ return
+
+ self.merge()
+
+ db, mode = Get_DataBase(self.dir)
+
+ # Write using the path relative to the top of the SConstruct
+ # directory (self.dir.path), not relative to the top of
+ # the Repository; we only write to our own .sconsign file,
+ # not to .sconsign files in Repositories.
+ path = normcase(self.dir.path)
+ for key, entry in self.entries.items():
+ entry.convert_to_sconsign()
+ db[path] = cPickle.dumps(self.entries, 1)
+
+ if sync:
+ try:
+ syncmethod = db.sync
+ except AttributeError:
+ # Not all anydbm modules have sync() methods.
+ pass
+ else:
+ syncmethod()
+
+class Dir(Base):
+ def __init__(self, fp=None, dir=None):
+ """
+ fp - file pointer to read entries from
+ """
+ Base.__init__(self)
+
+ if not fp:
+ return
+
+ self.entries = cPickle.load(fp)
+ if type(self.entries) is not type({}):
+ self.entries = {}
+ raise TypeError
+
+ if dir:
+ for key, entry in self.entries.items():
+ entry.convert_from_sconsign(dir, key)
+
+class DirFile(Dir):
+ """
+ Encapsulates reading and writing a per-directory .sconsign file.
+ """
+ def __init__(self, dir):
+ """
+ dir - the directory for the file
+ """
+
+ self.dir = dir
+ self.sconsign = os.path.join(dir.path, '.sconsign')
+
+ try:
+ fp = open(self.sconsign, 'rb')
+ except IOError:
+ fp = None
+
+ try:
+ Dir.__init__(self, fp, dir)
+ except KeyboardInterrupt:
+ raise
+ except:
+ SCons.Warnings.warn(SCons.Warnings.CorruptSConsignWarning,
+ "Ignoring corrupt .sconsign file: %s"%self.sconsign)
+
+ global sig_files
+ sig_files.append(self)
+
+ def write(self, sync=1):
+ """
+ Write the .sconsign file to disk.
+
+ Try to write to a temporary file first, and rename it if we
+ succeed. If we can't write to the temporary file, it's
+ probably because the directory isn't writable (and if so,
+ how did we build anything in this directory, anyway?), so
+ try to write directly to the .sconsign file as a backup.
+ If we can't rename, try to copy the temporary contents back
+ to the .sconsign file. Either way, always try to remove
+ the temporary file at the end.
+ """
+ if not self.dirty:
+ return
+
+ self.merge()
+
+ temp = os.path.join(self.dir.path, '.scons%d' % os.getpid())
+ try:
+ file = open(temp, 'wb')
+ fname = temp
+ except IOError:
+ try:
+ file = open(self.sconsign, 'wb')
+ fname = self.sconsign
+ except IOError:
+ return
+ for key, entry in self.entries.items():
+ entry.convert_to_sconsign()
+ cPickle.dump(self.entries, file, 1)
+ file.close()
+ if fname != self.sconsign:
+ try:
+ mode = os.stat(self.sconsign)[0]
+ os.chmod(self.sconsign, 0666)
+ os.unlink(self.sconsign)
+ except (IOError, OSError):
+ # Try to carry on in the face of either OSError
+ # (things like permission issues) or IOError (disk
+ # or network issues). If there's a really dangerous
+ # issue, it should get re-raised by the calls below.
+ pass
+ try:
+ os.rename(fname, self.sconsign)
+ except OSError:
+ # An OSError failure to rename may indicate something
+ # like the directory has no write permission, but
+ # the .sconsign file itself might still be writable,
+ # so try writing on top of it directly. An IOError
+ # here, or in any of the following calls, would get
+ # raised, indicating something like a potentially
+ # serious disk or network issue.
+ open(self.sconsign, 'wb').write(open(fname, 'rb').read())
+ os.chmod(self.sconsign, mode)
+ try:
+ os.unlink(temp)
+ except (IOError, OSError):
+ pass
+
+ForDirectory = DB
+
+def File(name, dbm_module=None):
+ """
+ Arrange for all signatures to be stored in a global .sconsign.db*
+ file.
+ """
+ global ForDirectory, DB_Name, DB_Module
+ if name is None:
+ ForDirectory = DirFile
+ DB_Module = None
+ else:
+ ForDirectory = DB
+ DB_Name = name
+ if not dbm_module is None:
+ DB_Module = dbm_module
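For what it is worth, the default storage written by the DB class above is
just a dblite database of pickled entry dictionaries keyed by directory
path, so it can be inspected offline. A rough sketch, assuming the default
DB_Name ('.sconsign', stored on disk as .sconsign.dblite) and a build that
has already run:

    import cPickle
    import SCons.dblite

    db = SCons.dblite.open('.sconsign', 'r')
    for dirpath in db.keys():
        entries = cPickle.loads(db[dirpath])
        # Each value is a dict mapping filename -> SConsignEntry.
        print dirpath, entries.keys()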
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/C.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/C.py
new file mode 100644
index 000000000..926493e76
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/C.py
@@ -0,0 +1,126 @@
+"""SCons.Scanner.C
+
+This module implements the dependency scanner for C/C++ code.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/C.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Node.FS
+import SCons.Scanner
+import SCons.Util
+import SCons.Warnings
+
+import SCons.cpp
+
+class SConsCPPScanner(SCons.cpp.PreProcessor):
+ """
+ SCons-specific subclass of the cpp.py module's processing.
+
+ We subclass this so that: 1) we can deal with files represented
+ by Nodes, not strings; 2) we can keep track of the files that are
+ missing.
+ """
+ def __init__(self, *args, **kw):
+ apply(SCons.cpp.PreProcessor.__init__, (self,)+args, kw)
+ self.missing = []
+ def initialize_result(self, fname):
+ self.result = SCons.Util.UniqueList([fname])
+ def finalize_result(self, fname):
+ return self.result[1:]
+ def find_include_file(self, t):
+ keyword, quote, fname = t
+ result = SCons.Node.FS.find_file(fname, self.searchpath[quote])
+ if not result:
+ self.missing.append((fname, self.current_file))
+ return result
+ def read_file(self, file):
+ try:
+ fp = open(str(file.rfile()))
+ except EnvironmentError, e:
+ self.missing.append((file, self.current_file))
+ return ''
+ else:
+ return fp.read()
+
+def dictify_CPPDEFINES(env):
+ cppdefines = env.get('CPPDEFINES', {})
+ if cppdefines is None:
+ return {}
+ if SCons.Util.is_Sequence(cppdefines):
+ result = {}
+ for c in cppdefines:
+ if SCons.Util.is_Sequence(c):
+ result[c[0]] = c[1]
+ else:
+ result[c] = None
+ return result
+ if not SCons.Util.is_Dict(cppdefines):
+ return {cppdefines : None}
+ return cppdefines
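A small illustration of the normalisation performed above; only env.get()
is used, so a plain dictionary stands in for a construction environment
here (the define names are invented):

    class FakeEnv(dict):
        # Minimal stand-in: dictify_CPPDEFINES only calls env.get().
        pass

    print dictify_CPPDEFINES(FakeEnv(CPPDEFINES='NDEBUG'))
    #   {'NDEBUG': None}
    print dictify_CPPDEFINES(FakeEnv(CPPDEFINES=[('VERSION', 2), 'NDEBUG']))
    #   {'VERSION': 2, 'NDEBUG': None}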
+
+class SConsCPPScannerWrapper:
+ """
+ The SCons wrapper around a cpp.py scanner.
+
+ This is the actual glue between the calling conventions of generic
+ SCons scanners, and the (subclass of) cpp.py class that knows how
+ to look for #include lines with reasonably real C-preprocessor-like
+ evaluation of #if/#ifdef/#else/#elif lines.
+ """
+ def __init__(self, name, variable):
+ self.name = name
+ self.path = SCons.Scanner.FindPathDirs(variable)
+ def __call__(self, node, env, path = ()):
+ cpp = SConsCPPScanner(current = node.get_dir(),
+ cpppath = path,
+ dict = dictify_CPPDEFINES(env))
+ result = cpp(node)
+ for included, includer in cpp.missing:
+ fmt = "No dependency generated for file: %s (included from: %s) -- file not found"
+ SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
+ fmt % (included, includer))
+ return result
+
+ def recurse_nodes(self, nodes):
+ return nodes
+ def select(self, node):
+ return self
+
+def CScanner():
+ """Return a prototype Scanner instance for scanning source files
+ that use the C pre-processor"""
+
+ # Here's how we would (or might) use the CPP scanner code above that
+ # knows how to evaluate #if/#ifdef/#else/#elif lines when searching
+ # for #includes. This is commented out for now until we add the
+ # right configurability to let users pick between the scanners.
+ #return SConsCPPScannerWrapper("CScanner", "CPPPATH")
+
+ cs = SCons.Scanner.ClassicCPP("CScanner",
+ "$CPPSUFFIXES",
+ "CPPPATH",
+ '^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")')
+ return cs
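To make the regular expression handed to ClassicCPP above a little more
concrete, this is roughly what it extracts (the source text is made up; the
groups are the opening quote, the file name and the closing quote):

    import re

    cre = re.compile('^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")',
                     re.M)
    src = '#include <stdio.h>\n#  include "local.h"\n// #include <skipped.h>\n'
    print cre.findall(src)
    #   [('<', 'stdio.h', '>'), ('"', 'local.h', '"')]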
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/D.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/D.py
new file mode 100644
index 000000000..97ece3a18
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/D.py
@@ -0,0 +1,68 @@
+"""SCons.Scanner.D
+
+Scanner for the Digital Mars "D" programming language.
+
+Coded by Andy Friesen
+17 Nov 2003
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/D.py 3842 2008/12/20 22:59:52 scons"
+
+import re
+import string
+
+import SCons.Node.FS
+import SCons.Scanner
+
+def DScanner():
+ """Return a prototype Scanner instance for scanning D source files"""
+ ds = D()
+ return ds
+
+class D(SCons.Scanner.Classic):
+ def __init__ (self):
+ SCons.Scanner.Classic.__init__ (self,
+ name = "DScanner",
+ suffixes = '$DSUFFIXES',
+ path_variable = 'DPATH',
+ regex = 'import\s+(?:[a-zA-Z0-9_.]+)\s*(?:,\s*(?:[a-zA-Z0-9_.]+)\s*)*;')
+
+ self.cre2 = re.compile ('(?:import\s)?\s*([a-zA-Z0-9_.]+)\s*(?:,|;)', re.M)
+
+ def find_include(self, include, source_dir, path):
+ # translate dots (package separators) to slashes
+ inc = string.replace(include, '.', '/')
+
+ i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path)
+ if i is None:
+ i = SCons.Node.FS.find_file (inc + '.di', (source_dir,) + path)
+ return i, include
+
+ def find_include_names(self, node):
+ includes = []
+ for i in self.cre.findall(node.get_contents()):
+ includes = includes + self.cre2.findall(i)
+ return includes
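As a rough illustration of the two-stage matching above (the module names
are arbitrary): the first pattern picks out whole import statements, and
the second then pulls the individual module names out of each statement.

    import re

    cre = re.compile('import\s+(?:[a-zA-Z0-9_.]+)\s*(?:,\s*(?:[a-zA-Z0-9_.]+)\s*)*;',
                     re.M)
    cre2 = re.compile('(?:import\s)?\s*([a-zA-Z0-9_.]+)\s*(?:,|;)', re.M)

    for stmt in cre.findall('import std.stdio, std.string;\nvoid main() {}\n'):
        print cre2.findall(stmt)
    #   ['std.stdio', 'std.string']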
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/Dir.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/Dir.py
new file mode 100644
index 000000000..35d500861
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/Dir.py
@@ -0,0 +1,105 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/Dir.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Node.FS
+import SCons.Scanner
+
+def only_dirs(nodes):
+ is_Dir = lambda n: isinstance(n.disambiguate(), SCons.Node.FS.Dir)
+ return filter(is_Dir, nodes)
+
+def DirScanner(**kw):
+ """Return a prototype Scanner instance for scanning
+ directories for on-disk files"""
+ kw['node_factory'] = SCons.Node.FS.Entry
+ kw['recursive'] = only_dirs
+ return apply(SCons.Scanner.Base, (scan_on_disk, "DirScanner"), kw)
+
+def DirEntryScanner(**kw):
+ """Return a prototype Scanner instance for "scanning"
+ directory Nodes for their in-memory entries"""
+ kw['node_factory'] = SCons.Node.FS.Entry
+ kw['recursive'] = None
+ return apply(SCons.Scanner.Base, (scan_in_memory, "DirEntryScanner"), kw)
+
+skip_entry = {}
+
+skip_entry_list = [
+ '.',
+ '..',
+ '.sconsign',
+ # Used by the native dblite.py module.
+ '.sconsign.dblite',
+ # Used by dbm and dumbdbm.
+ '.sconsign.dir',
+ # Used by dbm.
+ '.sconsign.pag',
+ # Used by dumbdbm.
+ '.sconsign.dat',
+ '.sconsign.bak',
+ # Used by some dbm emulations using Berkeley DB.
+ '.sconsign.db',
+]
+
+for skip in skip_entry_list:
+ skip_entry[skip] = 1
+ skip_entry[SCons.Node.FS._my_normcase(skip)] = 1
+
+do_not_scan = lambda k: not skip_entry.has_key(k)
+
+def scan_on_disk(node, env, path=()):
+ """
+ Scans a directory for on-disk files and directories therein.
+
+ Looking up the entries will add these to the in-memory Node tree
+ representation of the file system, so all we have to do is just
+ that and then call the in-memory scanning function.
+ """
+ try:
+ flist = node.fs.listdir(node.abspath)
+ except (IOError, OSError):
+ return []
+ e = node.Entry
+ for f in filter(do_not_scan, flist):
+ # Add ./ to the beginning of the file name so if it begins with a
+ # '#' we don't look it up relative to the top-level directory.
+ e('./' + f)
+ return scan_in_memory(node, env, path)
+
+def scan_in_memory(node, env, path=()):
+ """
+ "Scans" a Node.FS.Dir for its in-memory entries.
+ """
+ try:
+ entries = node.entries
+ except AttributeError:
+ # It's not a Node.FS.Dir (or doesn't look enough like one for
+ # our purposes), which can happen if a target list containing
+ # mixed Node types (Dirs and Files, for example) has a Dir as
+ # the first entry.
+ return []
+ entry_list = filter(do_not_scan, entries.keys())
+ entry_list.sort()
+ return map(lambda n, e=entries: e[n], entry_list)
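A small example of the bookkeeping-file filtering that both scanning
functions above rely on (the directory listing is invented):

    listing = ['.', '..', '.sconsign.dblite', 'SConscript', 'main.c']
    print filter(do_not_scan, listing)
    #   ['SConscript', 'main.c']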
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/Fortran.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/Fortran.py
new file mode 100644
index 000000000..e629b80b0
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/Fortran.py
@@ -0,0 +1,314 @@
+"""SCons.Scanner.Fortran
+
+This module implements the dependency scanner for Fortran code.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/Fortran.py 3842 2008/12/20 22:59:52 scons"
+
+import re
+import string
+
+import SCons.Node
+import SCons.Node.FS
+import SCons.Scanner
+import SCons.Util
+import SCons.Warnings
+
+class F90Scanner(SCons.Scanner.Classic):
+ """
+ A Classic Scanner subclass for Fortran source files which takes
+ into account both USE and INCLUDE statements. This scanner will
+ work for both F77 and F90 (and beyond) compilers.
+
+ Currently, this scanner assumes that the include files do not contain
+ USE statements. To enable the ability to deal with USE statements
+ in include files, add logic right after the module names are found
+ to loop over each include file, search for and locate each USE
+ statement, and append each module name to the list of dependencies.
+ Caching the search results in a common dictionary somewhere so that
+ the same include file is not searched multiple times would be a
+ smart thing to do.
+ """
+
+ def __init__(self, name, suffixes, path_variable,
+ use_regex, incl_regex, def_regex, *args, **kw):
+
+ self.cre_use = re.compile(use_regex, re.M)
+ self.cre_incl = re.compile(incl_regex, re.M)
+ self.cre_def = re.compile(def_regex, re.M)
+
+ def _scan(node, env, path, self=self):
+ node = node.rfile()
+
+ if not node.exists():
+ return []
+
+ return self.scan(node, env, path)
+
+ kw['function'] = _scan
+ kw['path_function'] = SCons.Scanner.FindPathDirs(path_variable)
+ kw['recursive'] = 1
+ kw['skeys'] = suffixes
+ kw['name'] = name
+
+ apply(SCons.Scanner.Current.__init__, (self,) + args, kw)
+
+ def scan(self, node, env, path=()):
+
+ # cache the includes list in node so we only scan it once:
+ if node.includes != None:
+ mods_and_includes = node.includes
+ else:
+ # retrieve all included filenames
+ includes = self.cre_incl.findall(node.get_contents())
+ # retrieve all USE'd module names
+ modules = self.cre_use.findall(node.get_contents())
+ # retrieve all defined module names
+ defmodules = self.cre_def.findall(node.get_contents())
+
+ # Remove all USE'd module names that are defined in the same file
+ d = {}
+ for m in defmodules:
+ d[m] = 1
+ modules = filter(lambda m, d=d: not d.has_key(m), modules)
+ #modules = self.undefinedModules(modules, defmodules)
+
+ # Convert module name to a .mod filename
+ suffix = env.subst('$FORTRANMODSUFFIX')
+ modules = map(lambda x, s=suffix: string.lower(x) + s, modules)
+ # Remove duplicate items from the list
+ mods_and_includes = SCons.Util.unique(includes+modules)
+ node.includes = mods_and_includes
+
+ # This is a hand-coded DSU (decorate-sort-undecorate, or
+ # Schwartzian transform) pattern. The sort key is the raw name
+ # of the file as specified on the USE or INCLUDE line, which lets
+ # us keep the sort order constant regardless of whether the file
+ # is actually found in a Repository or locally.
+ nodes = []
+ source_dir = node.get_dir()
+ if callable(path):
+ path = path()
+ for dep in mods_and_includes:
+ n, i = self.find_include(dep, source_dir, path)
+
+ if n is None:
+ SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
+ "No dependency generated for file: %s (referenced by: %s) -- file not found" % (i, node))
+ else:
+ sortkey = self.sort_key(dep)
+ nodes.append((sortkey, n))
+
+ nodes.sort()
+ nodes = map(lambda pair: pair[1], nodes)
+ return nodes
+
+def FortranScan(path_variable="FORTRANPATH"):
+ """Return a prototype Scanner instance for scanning source files
+ for Fortran USE & INCLUDE statements"""
+
+# The USE statement regex matches the following:
+#
+# USE module_name
+# USE :: module_name
+# USE, INTRINSIC :: module_name
+# USE, NON_INTRINSIC :: module_name
+#
+# Limitations
+#
+# -- While the regex can handle multiple USE statements on one line,
+# it cannot properly handle them if they are commented out.
+# In either of the following cases:
+#
+# ! USE mod_a ; USE mod_b [entire line is commented out]
+# USE mod_a ! ; USE mod_b [in-line comment of second USE statement]
+#
+# the second module name (mod_b) will be picked up as a dependency
+# even though it should be ignored. The only way I can see
+# to rectify this would be to modify the scanner to eliminate
+# the call to re.findall, read in the contents of the file,
+# treating the comment character as an end-of-line character
+# in addition to the normal linefeed, loop over each line,
+# weeding out the comments, and looking for the USE statements.
+# One advantage to this is that the regex passed to the scanner
+# would no longer need to match a semicolon.
+#
+# -- I question whether or not we need to detect dependencies on
+# INTRINSIC modules, because these are built into the compiler.
+# If we consider them a dependency, will SCons look for them, not
+# find them, and kill the build? Or will there be standard
+# compiler-specific directories we will need to point to so the
+# compiler and SCons can locate the proper object and mod files?
+
+# Here is a breakdown of the regex:
+#
+# (?i) : regex is case insensitive
+# ^ : start of line
+# (?: : group a collection of regex symbols without saving the match as a "group"
+# ^|; : matches either the start of the line or a semicolon (which separates multiple statements on a line)
+# ) : end the unsaved grouping
+# \s* : any amount of white space
+# USE : match the string USE, case insensitive
+# (?: : group a collection of regex symbols without saving the match as a "group"
+# \s+| : match one or more whitespace OR .... (the next entire grouped set of regex symbols)
+# (?: : group a collection of regex symbols without saving the match as a "group"
+# (?: : establish another unsaved grouping of regex symbols
+# \s* : any amount of white space
+# , : match a comma
+# \s* : any amount of white space
+# (?:NON_)? : optionally match the prefix NON_, case insensitive
+# INTRINSIC : match the string INTRINSIC, case insensitive
+# )? : optionally match the ", INTRINSIC/NON_INTRINSIC" grouped expression
+# \s* : any amount of white space
+# :: : match a double colon that must appear after the INTRINSIC/NON_INTRINSIC attribute
+# ) : end the unsaved grouping
+# ) : end the unsaved grouping
+# \s* : match any amount of white space
+# (\w+) : match the module name that is being USE'd
+#
+#
+ use_regex = "(?i)(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"
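As a quick sanity check of what the USE regex above extracts, here is a small stand-alone run (the Fortran snippet is made up; only Python's re module is needed):

    import re
    use_regex = r"(?i)(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"
    cre_use = re.compile(use_regex, re.M)
    src = ("      USE mod_a\n"
           "      use, intrinsic :: iso_c_binding\n"
           "      USE :: mod_b ; USE mod_c\n")
    # findall() returns the single capture group: the USE'd module names.
    print(cre_use.findall(src))   # ['mod_a', 'iso_c_binding', 'mod_b', 'mod_c']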
+
+
+# The INCLUDE statement regex matches the following:
+#
+# INCLUDE 'some_Text'
+# INCLUDE "some_Text"
+# INCLUDE "some_Text" ; INCLUDE "some_Text"
+# INCLUDE kind_"some_Text"
+# INCLUDE kind_'some_Text"
+#
+# where some_Text can include any alphanumeric and/or special character
+# as defined by the Fortran 2003 standard.
+#
+# Limitations:
+#
+# -- The Fortran standard dictates that a " or ' in the INCLUDE'd
+# string must be represented as "" or '', when the quotes that wrap
+# the entire string are a " or ', respectively. While the
+# regular expression below can detect the ' or " characters just fine,
+# the scanning logic is presently unable to detect them and reduce
+# them to a single instance. This probably isn't an issue since,
+# in practice, ' or " are not generally used in filenames.
+#
+# -- This regex will not properly deal with multiple INCLUDE statements
+# when the entire line has been commented out, ala
+#
+# ! INCLUDE 'some_file' ; INCLUDE 'some_file'
+#
+# In such cases, it will properly ignore the first INCLUDE file,
+# but will actually still pick up the second. Interestingly enough,
+# the regex will properly deal with these cases:
+#
+# INCLUDE 'some_file'
+# INCLUDE 'some_file' !; INCLUDE 'some_file'
+#
+# To get around the above limitation, the FORTRAN programmer could
+# simply comment each INCLUDE statement separately, like this
+#
+# ! INCLUDE 'some_file' !; INCLUDE 'some_file'
+#
+# The way I see it, the only way to get around this limitation would
+# be to modify the scanning logic to replace the calls to re.findall
+# with a custom loop that processes each line separately, throwing
+# away fully commented out lines before attempting to match against
+# the INCLUDE syntax.
+#
+# Here is a breakdown of the regex:
+#
+# (?i) : regex is case insensitive
+# (?: : begin a non-saving group that matches the following:
+# ^ : either the start of the line
+# | : or
+# ['">]\s*; : a semicolon that follows a single quote,
+# double quote or greater than symbol (with any
+# amount of whitespace in between). This will
+# allow the regex to match multiple INCLUDE
+# statements per line (although it also requires
+# the positive lookahead assertion that is
+# used below). It will even properly deal with
+# (i.e. ignore) cases in which the additional
+# INCLUDES are part of an in-line comment, ala
+# " INCLUDE 'someFile' ! ; INCLUDE 'someFile2' "
+# ) : end of non-saving group
+# \s* : any amount of white space
+# INCLUDE : match the string INCLUDE, case insensitive
+# \s+ : match one or more white space characters
+# (?:\w+_)? : match the optional "kind-param _" prefix allowed by the standard
+# [<"'] : match the include delimiter - an apostrophe, double quote, or less than symbol
+# (.+?) : match one or more characters that make up
+# the included path and file name and save it
+# in a group. The Fortran standard allows for
+# any non-control character to be used. The dot
+# operator will pick up any character, including
+# control codes, but I can't conceive of anyone
+# putting control codes in their file names.
+# The question mark indicates it is non-greedy so
+# that the regex will match only up to the next apostrophe,
+# double quote, or greater-than symbol
+# (?=["'>]) : positive lookahead assertion to match the include
+# delimiter - an apostrophe, double quote, or
+# greater than symbol. This level of complexity
+# is required so that the include delimiter is
+# not consumed by the match, thus allowing the
+# sub-regex discussed above to uniquely match a
+# set of semicolon-separated INCLUDE statements
+# (as allowed by the F2003 standard)
+
+ include_regex = """(?i)(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
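Likewise, a stand-alone run of the INCLUDE regex above on a made-up snippet:

    import re
    include_regex = r"""(?i)(?:^|['">]\s*;)\s*INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
    cre_incl = re.compile(include_regex, re.M)
    src = ("      INCLUDE 'mpif.h'\n"
           '      include "params.inc" ; include \'more.inc\'\n')
    # The capture group is the quoted path; the delimiter itself is not consumed.
    print(cre_incl.findall(src))   # ['mpif.h', 'params.inc', 'more.inc']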
+
+# The MODULE statement regex finds module definitions by matching
+# the following:
+#
+# MODULE module_name
+#
+# but *not* the following:
+#
+# MODULE PROCEDURE procedure_name
+#
+# Here is a breakdown of the regex:
+#
+# (?i) : regex is case insensitive
+# ^\s* : any amount of white space
+# MODULE : match the string MODULE, case insensitive
+# \s+ : match one or more white space characters
+# (?!PROCEDURE) : but *don't* match if the next word matches
+# PROCEDURE (negative lookahead assertion),
+# case insensitive
+# (\w+) : match one or more alphanumeric characters
+# that make up the defined module name and
+# save it in a group
+
+ def_regex = """(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)"""
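And the module-definition regex on a made-up snippet, showing that MODULE PROCEDURE lines are skipped:

    import re
    def_regex = r"""(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)"""
    cre_def = re.compile(def_regex, re.M)
    src = ("module geometry\n"
           "  MODULE PROCEDURE add_points\n")
    # Only the real module definition is reported, thanks to the negative lookahead.
    print(cre_def.findall(src))   # ['geometry']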
+
+ scanner = F90Scanner("FortranScan",
+ "$FORTRANSUFFIXES",
+ path_variable,
+ use_regex,
+ include_regex,
+ def_regex)
+ return scanner
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/IDL.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/IDL.py
new file mode 100644
index 000000000..9bd172873
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/IDL.py
@@ -0,0 +1,42 @@
+"""SCons.Scanner.IDL
+
+This module implements the dependency scanner for IDL (Interface
+Definition Language) files.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/IDL.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Node.FS
+import SCons.Scanner
+
+def IDLScan():
+ """Return a prototype Scanner instance for scanning IDL source files"""
+ cs = SCons.Scanner.ClassicCPP("IDLScan",
+ "$IDLSUFFIXES",
+ "CPPPATH",
+ '^[ \t]*(?:#[ \t]*include|[ \t]*import)[ \t]+(<|")([^>"]+)(>|")')
+ return cs
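For reference, a stand-alone run of the IDL include/import regex above (the file names are made up); each findall() tuple carries the opening bracket, the file name, and the closing bracket, which is the shape ClassicCPP expects:

    import re
    idl_re = re.compile(
        r'^[ \t]*(?:#[ \t]*include|[ \t]*import)[ \t]+(<|")([^>"]+)(>|")', re.M)
    src = '#include "my_interfaces.idl"\nimport <unknwn.idl>\n'
    # ClassicCPP uses the bracket (first element of each tuple) to decide
    # whether the source directory is searched first or last.
    print(idl_re.findall(src))
    # [('"', 'my_interfaces.idl', '"'), ('<', 'unknwn.idl', '>')]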
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/LaTeX.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/LaTeX.py
new file mode 100644
index 000000000..3e17e2548
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/LaTeX.py
@@ -0,0 +1,334 @@
+"""SCons.Scanner.LaTeX
+
+This module implements the dependency scanner for LaTeX code.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/LaTeX.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import string
+import re
+
+import SCons.Scanner
+import SCons.Util
+
+# list of graphics file extensions for TeX and LaTeX
+TexGraphics = ['.eps', '.ps']
+LatexGraphics = ['.pdf', '.png', '.jpg', '.gif', '.tif']
+
+# Used as a return value of modify_env_var if the variable is not set.
+class _Null:
+ pass
+_null = _Null
+
+# The user specifies the paths in env[variable], similar to other builders.
+# They may be relative and must be converted to absolute, as expected
+# by LaTeX and Co. The environment may already have some paths in
+# env['ENV'][var]. These paths are honored, but the env[var] paths have
+# higher precedence. All changes are un-done on exit.
+def modify_env_var(env, var, abspath):
+ try:
+ save = env['ENV'][var]
+ except KeyError:
+ save = _null
+ env.PrependENVPath(var, abspath)
+ try:
+ if SCons.Util.is_List(env[var]):
+ #TODO(1.5)
+ #env.PrependENVPath(var, [os.path.abspath(str(p)) for p in env[var]])
+ env.PrependENVPath(var, map(lambda p: os.path.abspath(str(p)), env[var]))
+ else:
+ # Split at os.pathsep to convert into absolute path
+ #TODO(1.5) env.PrependENVPath(var, [os.path.abspath(p) for p in str(env[var]).split(os.pathsep)])
+ env.PrependENVPath(var, map(lambda p: os.path.abspath(p), string.split(str(env[var]), os.pathsep)))
+ except KeyError:
+ pass
+
+ # Convert the value into a string explicitly so we can append os.pathsep
+ # (without a trailing separator, the system default paths would not be
+ # searched as well). env.AppendENVPath(var, ":") cannot be used here
+ # because it refuses to append a bare ":" (os.pathsep).
+
+ if SCons.Util.is_List(env['ENV'][var]):
+ # TODO(1.5)
+ #env['ENV'][var] = os.pathsep.join(env['ENV'][var])
+ env['ENV'][var] = string.join(env['ENV'][var], os.pathsep)
+ # Append the trailing os.pathsep character here to catch the case with no env[var]
+ env['ENV'][var] = env['ENV'][var] + os.pathsep
+
+ return save
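A hedged usage sketch of modify_env_var (the TEXINPUTS paths are illustrative, and this assumes a default SCons Environment can be constructed in the calling context):

    import os
    import SCons.Environment
    from SCons.Scanner.LaTeX import modify_env_var

    env = SCons.Environment.Environment()
    env['TEXINPUTS'] = ['figures', 'chapters']          # relative user paths
    saved = modify_env_var(env, 'TEXINPUTS', os.path.abspath('.'))
    # env['ENV']['TEXINPUTS'] is now an os.pathsep-joined string that starts
    # with the absolute forms of 'figures' and 'chapters' and ends with a
    # trailing os.pathsep so the tool's default search paths still apply.
    # 'saved' holds the previous env['ENV']['TEXINPUTS'] (or the _null
    # sentinel) so a caller can restore it afterwards.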
+
+class FindENVPathDirs:
+ """A class to bind a specific *PATH variable name to a function that
+ will return all of the *path directories."""
+ def __init__(self, variable):
+ self.variable = variable
+ def __call__(self, env, dir=None, target=None, source=None, argument=None):
+ import SCons.PathList
+ try:
+ path = env['ENV'][self.variable]
+ except KeyError:
+ return ()
+
+ dir = dir or env.fs._cwd
+ path = SCons.PathList.PathList(path).subst_path(env, target, source)
+ return tuple(dir.Rfindalldirs(path))
+
+
+
+def LaTeXScanner():
+ """Return a prototype Scanner instance for scanning LaTeX source files
+ when built with latex.
+ """
+ ds = LaTeX(name = "LaTeXScanner",
+ suffixes = '$LATEXSUFFIXES',
+ # in the search order, see below in LaTeX class docstring
+ graphics_extensions = TexGraphics,
+ recursive = 0)
+ return ds
+
+def PDFLaTeXScanner():
+ """Return a prototype Scanner instance for scanning LaTeX source files
+ when built with pdflatex.
+ """
+ ds = LaTeX(name = "PDFLaTeXScanner",
+ suffixes = '$LATEXSUFFIXES',
+ # in the search order, see below in LaTeX class docstring
+ graphics_extensions = LatexGraphics,
+ recursive = 0)
+ return ds
+
+class LaTeX(SCons.Scanner.Base):
+ """Class for scanning LaTeX files for included files.
+
+ Unlike most scanners, which use regular expressions that just
+ return the included file name, this returns a tuple consisting
+ of the keyword for the inclusion ("include", "includegraphics",
+ "input", or "bibliography"), and then the file name itself.
+ Based on a quick look at LaTeX documentation, it seems that we
+ should append .tex suffix for the "include" keywords, append .tex if
+ there is no extension for the "input" keyword, and need to add .bib
+ for the "bibliography" keyword that does not accept extensions by itself.
+
+ Finally, if there is no extension for an "includegraphics" keyword
+ latex will append .ps or .eps to find the file, while pdftex may use .pdf,
+ .jpg, .tif, .mps, or .png.
+
+ The actual subset and search order may be altered by the
+ DeclareGraphicsExtensions command. This complication is ignored here.
+ The default order corresponds to experimentation with teTeX
+ $ latex --version
+ pdfeTeX 3.141592-1.21a-2.2 (Web2C 7.5.4)
+ kpathsea version 3.5.4
+ The order is:
+ ['.eps', '.ps'] for latex
+ ['.png', '.pdf', '.jpg', '.tif'] for pdflatex.
+
+ Another difference is that the search path is determined by the type
+ of the file being searched:
+ env['TEXINPUTS'] for "input" and "include" keywords
+ env['TEXINPUTS'] for "includegraphics" keyword
+ env['BIBINPUTS'] for "bibliography" keyword
+ env['BSTINPUTS'] for "bibliographystyle" keyword
+
+ FIXME: also look for the class or style in document[class|style]{}
+ FIXME: also look for the argument of bibliographystyle{}
+ """
+ keyword_paths = {'include': 'TEXINPUTS',
+ 'input': 'TEXINPUTS',
+ 'includegraphics': 'TEXINPUTS',
+ 'bibliography': 'BIBINPUTS',
+ 'bibliographystyle': 'BSTINPUTS',
+ 'usepackage': 'TEXINPUTS'}
+ env_variables = SCons.Util.unique(keyword_paths.values())
+
+ def __init__(self, name, suffixes, graphics_extensions, *args, **kw):
+
+ # We have to include \n with the % we exclude from the first part
+ # of the regex because the expression is compiled with re.M.
+ # Without the \n, the ^ could match the beginning of a *previous*
+ # line followed by one or more newline characters (i.e. blank
+ # lines), interfering with a match on the next line.
+ regex = r'^[^%\n]*\\(include|includegraphics(?:\[[^\]]+\])?|input|bibliography|usepackage){([^}]*)}'
+ self.cre = re.compile(regex, re.M)
+ self.graphics_extensions = graphics_extensions
+
+ def _scan(node, env, path=(), self=self):
+ node = node.rfile()
+ if not node.exists():
+ return []
+ return self.scan(node, path)
+
+ class FindMultiPathDirs:
+ """The stock FindPathDirs function has the wrong granularity:
+ it is called once per target, while we need a path that depends
+ on what kind of included file is being searched. This wrapper
+ hides multiple instances of FindPathDirs, one per LaTeX path
+ variable in the environment. When invoked, the function calculates
+ and returns all the required paths as a dictionary (converted into
+ a tuple to become hashable). Then the scan function converts it
+ back and uses a dictionary of tuples rather than a single tuple
+ of paths.
+ """
+ def __init__(self, dictionary):
+ self.dictionary = {}
+ for k,n in dictionary.items():
+ self.dictionary[k] = ( SCons.Scanner.FindPathDirs(n),
+ FindENVPathDirs(n) )
+
+ def __call__(self, env, dir=None, target=None, source=None,
+ argument=None):
+ di = {}
+ for k,(c,cENV) in self.dictionary.items():
+ di[k] = ( c(env, dir=None, target=None, source=None,
+ argument=None) ,
+ cENV(env, dir=None, target=None, source=None,
+ argument=None) )
+ # To prevent "dict is not hashable error"
+ return tuple(di.items())
+
+ class LaTeXScanCheck:
+ """Skip all but LaTeX source files, i.e., do not scan *.eps,
+ *.pdf, *.jpg, etc.
+ """
+ def __init__(self, suffixes):
+ self.suffixes = suffixes
+ def __call__(self, node, env):
+ current = not node.has_builder() or node.is_up_to_date()
+ scannable = node.get_suffix() in env.subst_list(self.suffixes)[0]
+ # Returning false means that the file is not scanned.
+ return scannable and current
+
+ kw['function'] = _scan
+ kw['path_function'] = FindMultiPathDirs(LaTeX.keyword_paths)
+ kw['recursive'] = 1
+ kw['skeys'] = suffixes
+ kw['scan_check'] = LaTeXScanCheck(suffixes)
+ kw['name'] = name
+
+ apply(SCons.Scanner.Base.__init__, (self,) + args, kw)
+
+ def _latex_names(self, include):
+ filename = include[1]
+ if include[0] == 'input':
+ base, ext = os.path.splitext( filename )
+ if ext == "":
+ return [filename + '.tex']
+ if (include[0] == 'include'):
+ return [filename + '.tex']
+ if include[0] == 'bibliography':
+ base, ext = os.path.splitext( filename )
+ if ext == "":
+ return [filename + '.bib']
+ if include[0] == 'usepackage':
+ base, ext = os.path.splitext( filename )
+ if ext == "":
+ return [filename + '.sty']
+ if include[0] == 'includegraphics':
+ base, ext = os.path.splitext( filename )
+ if ext == "":
+ #TODO(1.5) return [filename + e for e in self.graphics_extensions]
+ return map(lambda e, f=filename: f+e, self.graphics_extensions)
+ return [filename]
+
+ def sort_key(self, include):
+ return SCons.Node.FS._my_normcase(str(include))
+
+ def find_include(self, include, source_dir, path):
+ try:
+ sub_path = path[include[0]]
+ except (IndexError, KeyError):
+ sub_path = ()
+ try_names = self._latex_names(include)
+ for n in try_names:
+ # see if we find it using the path in env[var]
+ i = SCons.Node.FS.find_file(n, (source_dir,) + sub_path[0])
+ if i:
+ return i, include
+ # see if we find it using the path in env['ENV'][var]
+ i = SCons.Node.FS.find_file(n, (source_dir,) + sub_path[1])
+ if i:
+ return i, include
+ return i, include
+
+ def scan(self, node, path=()):
+ # Modify the default scan function to allow for the regular
+ # expression to return a comma separated list of file names
+ # as can be the case with the bibliography keyword.
+
+ # Cache the includes list in node so we only scan it once:
+ path_dict = dict(list(path))
+ noopt_cre = re.compile('\[.*$')
+ if node.includes != None:
+ includes = node.includes
+ else:
+ includes = self.cre.findall(node.get_contents())
+ # 1. Split comma-separated lines, e.g.
+ # ('bibliography', 'phys,comp')
+ # should become two entries
+ # ('bibliography', 'phys')
+ # ('bibliography', 'comp')
+ # 2. Remove the options, e.g., such as
+ # ('includegraphics[clip,width=0.7\\linewidth]', 'picture.eps')
+ # should become
+ # ('includegraphics', 'picture.eps')
+ split_includes = []
+ for include in includes:
+ inc_type = noopt_cre.sub('', include[0])
+ inc_list = string.split(include[1],',')
+ for j in range(len(inc_list)):
+ split_includes.append( (inc_type, inc_list[j]) )
+ #
+ includes = split_includes
+ node.includes = includes
+
+ # This is a hand-coded DSU (decorate-sort-undecorate, or
+ # Schwartzian transform) pattern. The sort key is the raw name
+ # of the file as specified on the \include, \input, etc. line.
+ # TODO: what about the comment in the original Classic scanner:
+ # """which lets
+ # us keep the sort order constant regardless of whether the file
+ # is actually found in a Repository or locally."""
+ nodes = []
+ source_dir = node.get_dir()
+ for include in includes:
+ #
+ # Handle multiple filenames in include[1]
+ #
+ n, i = self.find_include(include, source_dir, path_dict)
+ if n is None:
+ # Do not bother with 'usepackage' warnings, as they most
+ # likely refer to system-level files
+ if include[0] != 'usepackage':
+ SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
+ "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node))
+ else:
+ sortkey = self.sort_key(n)
+ nodes.append((sortkey, n))
+ #
+ nodes.sort()
+ nodes = map(lambda pair: pair[1], nodes)
+ return nodes
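For reference, a stand-alone run of the include regex above together with the comma-splitting and option-stripping steps that scan() performs (the file names are made up):

    import re
    latex_re = re.compile(
        r'^[^%\n]*\\(include|includegraphics(?:\[[^\]]+\])?|input|bibliography|usepackage){([^}]*)}',
        re.M)
    src = "\n".join([
        r"\include{chapters/intro}",
        r"\includegraphics[width=0.7\linewidth]{figs/plot.eps}",
        r"\bibliography{phys,comp}",
        r"% \input{ignored}",            # commented out, so not matched
    ])
    noopt = re.compile(r"\[.*$")
    split_includes = []
    for keyword, arg in latex_re.findall(src):
        for name in arg.split(","):      # 'phys,comp' becomes two entries
            split_includes.append((noopt.sub("", keyword), name))
    print(split_includes)
    # [('include', 'chapters/intro'), ('includegraphics', 'figs/plot.eps'),
    #  ('bibliography', 'phys'), ('bibliography', 'comp')]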
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/Prog.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/Prog.py
new file mode 100644
index 000000000..ad71ba449
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/Prog.py
@@ -0,0 +1,97 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/Prog.py 3842 2008/12/20 22:59:52 scons"
+
+import string
+
+import SCons.Node
+import SCons.Node.FS
+import SCons.Scanner
+import SCons.Util
+
+# global, set by --debug=findlibs
+print_find_libs = None
+
+def ProgramScanner(**kw):
+ """Return a prototype Scanner instance for scanning executable
+ files for static-lib dependencies"""
+ kw['path_function'] = SCons.Scanner.FindPathDirs('LIBPATH')
+ ps = apply(SCons.Scanner.Base, [scan, "ProgramScanner"], kw)
+ return ps
+
+def scan(node, env, libpath = ()):
+ """
+ This scanner scans program files for static-library
+ dependencies. It will search the LIBPATH environment variable
+ for libraries specified in the LIBS variable, returning any
+ files it finds as dependencies.
+ """
+ try:
+ libs = env['LIBS']
+ except KeyError:
+ # There are no LIBS in this environment, so just return a null list:
+ return []
+ if SCons.Util.is_String(libs):
+ libs = string.split(libs)
+ else:
+ libs = SCons.Util.flatten(libs)
+
+ try:
+ prefix = env['LIBPREFIXES']
+ if not SCons.Util.is_List(prefix):
+ prefix = [ prefix ]
+ except KeyError:
+ prefix = [ '' ]
+
+ try:
+ suffix = env['LIBSUFFIXES']
+ if not SCons.Util.is_List(suffix):
+ suffix = [ suffix ]
+ except KeyError:
+ suffix = [ '' ]
+
+ pairs = []
+ for suf in map(env.subst, suffix):
+ for pref in map(env.subst, prefix):
+ pairs.append((pref, suf))
+
+ result = []
+
+ if callable(libpath):
+ libpath = libpath()
+
+ find_file = SCons.Node.FS.find_file
+ adjustixes = SCons.Util.adjustixes
+ for lib in libs:
+ if SCons.Util.is_String(lib):
+ lib = env.subst(lib)
+ for pref, suf in pairs:
+ l = adjustixes(lib, pref, suf)
+ l = find_file(l, libpath, verbose=print_find_libs)
+ if l:
+ result.append(l)
+ else:
+ result.append(lib)
+
+ return result
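A small stand-alone sketch, not using SCons.Util.adjustixes, of how the LIBPREFIXES/LIBSUFFIXES pairs above turn LIBS entries into the candidate file names that are then looked up along LIBPATH (library names and affixes are illustrative):

    libs = ['m', 'readline']
    prefixes = ['lib']
    suffixes = ['.a', '.so']

    pairs = []
    for suf in suffixes:
        for pref in prefixes:
            pairs.append((pref, suf))

    candidates = []
    for lib in libs:
        for pref, suf in pairs:
            # adjustixes() additionally avoids doubling an affix that is already there
            candidates.append(pref + lib + suf)
    print(candidates)
    # ['libm.a', 'libm.so', 'libreadline.a', 'libreadline.so']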
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/RC.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/RC.py
new file mode 100644
index 000000000..ecbc57256
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/RC.py
@@ -0,0 +1,49 @@
+"""SCons.Scanner.RC
+
+This module implements the dependency scanner for RC (Windows
+resource script) files.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/RC.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Node.FS
+import SCons.Scanner
+import re
+
+def RCScan():
+ """Return a prototype Scanner instance for scanning RC source files"""
+
+ res_re= r'^(?:\s*#\s*(?:include)|' \
+ '.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)' \
+ '\s*.*?)' \
+ '\s*(<|"| )([^>"\s]+)(?:[>" ])*$'
+ resScanner = SCons.Scanner.ClassicCPP( "ResourceScanner",
+ "$RCSUFFIXES",
+ "CPPPATH",
+ res_re )
+
+ return resScanner
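A quick stand-alone check of what the resource regex above captures on made-up .rc content; each findall() tuple is the delimiter plus the referenced file, as ClassicCPP expects:

    import re
    res_re = (r'^(?:\s*#\s*(?:include)|'
              r'.*?\s+(?:ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)'
              r'\s*.*?)'
              r'\s*(<|"| )([^>"\s]+)(?:[>" ])*$')
    rc_re = re.compile(res_re, re.M)
    src = 'IDI_APP ICON "app.ico"\n#include "resource.h"\n'
    print(rc_re.findall(src))
    # [('"', 'app.ico'), ('"', 'resource.h')]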
diff --git a/tools/scons/scons-local-1.2.0/SCons/Scanner/__init__.py b/tools/scons/scons-local-1.2.0/SCons/Scanner/__init__.py
new file mode 100644
index 000000000..e18f0fe30
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Scanner/__init__.py
@@ -0,0 +1,406 @@
+"""SCons.Scanner
+
+The Scanner package for the SCons software construction utility.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Scanner/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+import re
+import string
+
+import SCons.Node.FS
+import SCons.Util
+
+
+class _Null:
+ pass
+
+# This is used instead of None as a default argument value so None can be
+# used as an actual argument value.
+_null = _Null
+
+def Scanner(function, *args, **kw):
+ """
+ Public interface factory function for creating different types
+ of Scanners based on the different types of "functions" that may
+ be supplied.
+
+ TODO: Deprecate this some day. We've moved the functionality
+ inside the Base class and really don't need this factory function
+ any more. It was, however, used by some of our Tool modules, so
+ the call probably ended up in various people's custom modules
+ patterned on SCons code.
+ """
+ if SCons.Util.is_Dict(function):
+ return apply(Selector, (function,) + args, kw)
+ else:
+ return apply(Base, (function,) + args, kw)
+
+
+
+class FindPathDirs:
+ """A class to bind a specific *PATH variable name to a function that
+ will return all of the *path directories."""
+ def __init__(self, variable):
+ self.variable = variable
+ def __call__(self, env, dir=None, target=None, source=None, argument=None):
+ import SCons.PathList
+ try:
+ path = env[self.variable]
+ except KeyError:
+ return ()
+
+ dir = dir or env.fs._cwd
+ path = SCons.PathList.PathList(path).subst_path(env, target, source)
+ return tuple(dir.Rfindalldirs(path))
+
+
+
+class Base:
+ """
+ The base class for dependency scanners. This implements
+ straightforward, single-pass scanning of a single file.
+ """
+
+ def __init__(self,
+ function,
+ name = "NONE",
+ argument = _null,
+ skeys = _null,
+ path_function = None,
+ node_class = SCons.Node.FS.Entry,
+ node_factory = None,
+ scan_check = None,
+ recursive = None):
+ """
+ Construct a new scanner object given a scanner function.
+
+ 'function' - a scanner function taking two or three
+ arguments and returning a list of strings.
+
+ 'name' - a name for identifying this scanner object.
+
+ 'argument' - an optional argument that, if specified, will be
+ passed to both the scanner function and the path_function.
+
+ 'skeys' - an optional list argument that can be used to determine
+ which scanner should be used for a given Node. In the case of File
+ nodes, for example, the 'skeys' would be file suffixes.
+
+ 'path_function' - a function that takes four or five arguments
+ (a construction environment, Node for the directory containing
+ the SConscript file that defined the primary target, list of
+ target nodes, list of source nodes, and optional argument for
+ this instance) and returns a tuple of the directories that can
+ be searched for implicit dependency files. May also return a
+ callable() which is called with no args and returns the tuple
+ (supporting Bindable class).
+
+ 'node_class' - the class of Nodes which this scan will return.
+ If node_class is None, then this scanner will not enforce any
+ Node conversion and will return the raw results from the
+ underlying scanner function.
+
+ 'node_factory' - the factory function to be called to translate
+ the raw results returned by the scanner function into the
+ expected node_class objects.
+
+ 'scan_check' - a function to be called to first check whether
+ this node really needs to be scanned.
+
+ 'recursive' - specifies that this scanner should be invoked
+ recursively on all of the implicit dependencies it returns
+ (the canonical example being #include lines in C source files).
+ May be a callable, which will be called to filter the list
+ of nodes found to select a subset for recursive scanning
+ (the canonical example being only recursively scanning
+ subdirectories within a directory).
+
+ The scanner function's first argument will be a Node that should
+ be scanned for dependencies, the second argument will be an
+ Environment object, the third argument will be the tuple of paths
+ returned by the path_function, and the fourth argument will be
+ the value passed into 'argument', and the returned list should
+ contain the Nodes for all the direct dependencies of the file.
+
+ Examples:
+
+ s = Scanner(my_scanner_function)
+
+ s = Scanner(function = my_scanner_function)
+
+ s = Scanner(function = my_scanner_function, argument = 'foo')
+
+ """
+
+ # Note: this class could easily work with scanner functions that take
+ # something other than a filename as an argument (e.g. a database
+ # node) and a dependencies list that aren't file names. All that
+ # would need to be changed is the documentation.
+
+ self.function = function
+ self.path_function = path_function
+ self.name = name
+ self.argument = argument
+
+ if skeys is _null:
+ if SCons.Util.is_Dict(function):
+ skeys = function.keys()
+ else:
+ skeys = []
+ self.skeys = skeys
+
+ self.node_class = node_class
+ self.node_factory = node_factory
+ self.scan_check = scan_check
+ if callable(recursive):
+ self.recurse_nodes = recursive
+ elif recursive:
+ self.recurse_nodes = self._recurse_all_nodes
+ else:
+ self.recurse_nodes = self._recurse_no_nodes
+
+ def path(self, env, dir=None, target=None, source=None):
+ if not self.path_function:
+ return ()
+ if not self.argument is _null:
+ return self.path_function(env, dir, target, source, self.argument)
+ else:
+ return self.path_function(env, dir, target, source)
+
+ def __call__(self, node, env, path = ()):
+ """
+ This method scans a single object. 'node' is the node
+ that will be passed to the scanner function, and 'env' is the
+ environment that will be passed to the scanner function. A list of
+ direct dependency nodes for the specified node will be returned.
+ """
+ if self.scan_check and not self.scan_check(node, env):
+ return []
+
+ self = self.select(node)
+
+ if not self.argument is _null:
+ list = self.function(node, env, path, self.argument)
+ else:
+ list = self.function(node, env, path)
+
+ kw = {}
+ if hasattr(node, 'dir'):
+ kw['directory'] = node.dir
+ node_factory = env.get_factory(self.node_factory)
+ nodes = []
+ for l in list:
+ if self.node_class and not isinstance(l, self.node_class):
+ l = apply(node_factory, (l,), kw)
+ nodes.append(l)
+ return nodes
+
+ def __cmp__(self, other):
+ try:
+ return cmp(self.__dict__, other.__dict__)
+ except AttributeError:
+ # other probably doesn't have a __dict__
+ return cmp(self.__dict__, other)
+
+ def __hash__(self):
+ return id(self)
+
+ def __str__(self):
+ return self.name
+
+ def add_skey(self, skey):
+ """Add a skey to the list of skeys"""
+ self.skeys.append(skey)
+
+ def get_skeys(self, env=None):
+ if env and SCons.Util.is_String(self.skeys):
+ return env.subst_list(self.skeys)[0]
+ return self.skeys
+
+ def select(self, node):
+ if SCons.Util.is_Dict(self.function):
+ key = node.scanner_key()
+ try:
+ return self.function[key]
+ except KeyError:
+ return None
+ else:
+ return self
+
+ def _recurse_all_nodes(self, nodes):
+ return nodes
+
+ def _recurse_no_nodes(self, nodes):
+ return []
+
+ recurse_nodes = _recurse_no_nodes
+
+ def add_scanner(self, skey, scanner):
+ self.function[skey] = scanner
+ self.add_skey(skey)
+
+
+class Selector(Base):
+ """
+ A class for selecting a more specific scanner based on the
+ scanner_key() (suffix) for a specific Node.
+
+ TODO: This functionality has been moved into the inner workings of
+ the Base class, and this class will be deprecated at some point.
+ (It was never exposed directly as part of the public interface,
+ although it is used by the Scanner() factory function that was
+ used by various Tool modules and therefore was likely a template
+ for custom modules that may be out there.)
+ """
+ def __init__(self, dict, *args, **kw):
+ apply(Base.__init__, (self, None,)+args, kw)
+ self.dict = dict
+ self.skeys = dict.keys()
+
+ def __call__(self, node, env, path = ()):
+ return self.select(node)(node, env, path)
+
+ def select(self, node):
+ try:
+ return self.dict[node.scanner_key()]
+ except KeyError:
+ return None
+
+ def add_scanner(self, skey, scanner):
+ self.dict[skey] = scanner
+ self.add_skey(skey)
+
+
+class Current(Base):
+ """
+ A class for scanning files that are source files (have no builder)
+ or are derived files and are current (which implies that they exist,
+ either locally or in a repository).
+ """
+
+ def __init__(self, *args, **kw):
+ def current_check(node, env):
+ return not node.has_builder() or node.is_up_to_date()
+ kw['scan_check'] = current_check
+ apply(Base.__init__, (self,) + args, kw)
+
+class Classic(Current):
+ """
+ A Scanner subclass to contain the common logic for classic CPP-style
+ include scanning, but which can be customized to use different
+ regular expressions to find the includes.
+
+ Note that in order for this to work "out of the box" (without
+ overriding the find_include() and sort_key() methods), the regular
+ expression passed to the constructor must capture the name of the
+ include file in its first (and only) parenthesized group.
+ """
+
+ def __init__(self, name, suffixes, path_variable, regex, *args, **kw):
+
+ self.cre = re.compile(regex, re.M)
+
+ def _scan(node, env, path=(), self=self):
+ node = node.rfile()
+ if not node.exists():
+ return []
+ return self.scan(node, path)
+
+ kw['function'] = _scan
+ kw['path_function'] = FindPathDirs(path_variable)
+ kw['recursive'] = 1
+ kw['skeys'] = suffixes
+ kw['name'] = name
+
+ apply(Current.__init__, (self,) + args, kw)
+
+ def find_include(self, include, source_dir, path):
+ n = SCons.Node.FS.find_file(include, (source_dir,) + tuple(path))
+ return n, include
+
+ def sort_key(self, include):
+ return SCons.Node.FS._my_normcase(include)
+
+ def find_include_names(self, node):
+ return self.cre.findall(node.get_contents())
+
+ def scan(self, node, path=()):
+
+ # cache the includes list in node so we only scan it once:
+ if node.includes != None:
+ includes = node.includes
+ else:
+ includes = self.find_include_names (node)
+ node.includes = includes
+
+ # This is a hand-coded DSU (decorate-sort-undecorate, or
+ # Schwartzian transform) pattern. The sort key is the raw name
+ # of the file as specified on the #include line (including the
+ # " or <, since that may affect what file is found), which lets
+ # us keep the sort order constant regardless of whether the file
+ # is actually found in a Repository or locally.
+ nodes = []
+ source_dir = node.get_dir()
+ if callable(path):
+ path = path()
+ for include in includes:
+ n, i = self.find_include(include, source_dir, path)
+
+ if n is None:
+ SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
+ "No dependency generated for file: %s (included from: %s) -- file not found" % (i, node))
+ else:
+ sortkey = self.sort_key(include)
+ nodes.append((sortkey, n))
+
+ nodes.sort()
+ nodes = map(lambda pair: pair[1], nodes)
+ return nodes
+
+class ClassicCPP(Classic):
+ """
+ A Classic Scanner subclass which takes into account the type of
+ bracketing used to include the file, and uses classic CPP rules
+ for searching for the files based on the bracketing.
+
+ Note that in order for this to work, the regular expression passed
+ to the constructor must capture the leading bracket in its first
+ group and the contained filename in its second group.
+ """
+ def find_include(self, include, source_dir, path):
+ if include[0] == '"':
+ paths = (source_dir,) + tuple(path)
+ else:
+ paths = tuple(path) + (source_dir,)
+
+ n = SCons.Node.FS.find_file(include[1], paths)
+
+ return n, include[1]
+
+ def sort_key(self, include):
+ return SCons.Node.FS._my_normcase(string.join(include))
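To illustrate how the Classic base class above is typically specialized, here is a hypothetical scanner for a made-up ".foo" language whose include directive is require "name"; the FooScan name, the FOOPATH variable, and the suffix are illustrative only, and this assumes the scons-local engine is importable:

    import SCons.Scanner

    foo_scan = SCons.Scanner.Classic(
        "FooScan",                     # name
        [".foo"],                      # suffixes (skeys)
        "FOOPATH",                     # construction variable used by FindPathDirs
        r'^\s*require\s+"([^"]+)"')    # the single group is the included file name
    # In an SConscript one would typically register it with something like
    # env.Append(SCANNERS=foo_scan); the Classic machinery then provides the
    # include caching, DSU sorting and recursive scanning shown above.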
diff --git a/tools/scons/scons-local-1.2.0/SCons/Script/Interactive.py b/tools/scons/scons-local-1.2.0/SCons/Script/Interactive.py
new file mode 100644
index 000000000..13cc41409
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Script/Interactive.py
@@ -0,0 +1,376 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Script/Interactive.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """
+SCons interactive mode
+"""
+
+# TODO:
+#
+# This has the potential to grow into something with a really big life
+# of its own, which might or might not be a good thing. Nevertheless,
+# here are some enhancements that will probably be requested some day
+# and are worth keeping in mind (assuming this takes off):
+#
+# - A command to re-read / re-load the SConscript files. This may
+# involve allowing people to specify command-line options (e.g. -f,
+# -I, --no-site-dir) that affect how the SConscript files are read.
+#
+# - Additional command-line options on the "build" command.
+#
+# Of the supported options that seemed to make sense (after a quick
+# pass through the list), the ones that seemed likely enough to be
+# used are listed in the man page and have explicit test scripts.
+#
+# These had code changed in Script/Main.py to support them, but didn't
+# seem likely to be used regularly, so had no test scripts added:
+#
+# build --diskcheck=*
+# build --implicit-cache=*
+# build --implicit-deps-changed=*
+# build --implicit-deps-unchanged=*
+#
+# These look like they should "just work" with no changes to the
+# existing code, but like those above, look unlikely to be used and
+# therefore had no test scripts added:
+#
+# build --random
+#
+# These I'm not sure about. They might be useful for individual
+# "build" commands, and may even work, but they seem unlikely enough
+# that we'll wait until they're requested before spending any time on
+# writing test scripts for them, or investigating whether they work.
+#
+# build -q [??? is there a useful analog to the exit status?]
+# build --duplicate=
+# build --profile=
+# build --max-drift=
+# build --warn=*
+# build --Y
+#
+# - Most of the SCons command-line options that the "build" command
+# supports should be settable as default options that apply to all
+# subsequent "build" commands. Maybe a "set {option}" command that
+# maps to "SetOption('{option}')".
+#
+# - Need something in the 'help' command that prints the -h output.
+#
+# - A command to run the configure subsystem separately (must see how
+# this interacts with the new automake model).
+#
+# - Command-line completion of target names; maybe even of SCons options?
+# Completion is something that's supported by the Python cmd module,
+# so this should be doable without too much trouble.
+#
+
+import cmd
+import copy
+import os
+import re
+import shlex
+import string
+import sys
+
+try:
+ import readline
+except ImportError:
+ pass
+
+class SConsInteractiveCmd(cmd.Cmd):
+ """\
+ build [TARGETS] Build the specified TARGETS and their dependencies.
+ 'b' is a synonym.
+ clean [TARGETS] Clean (remove) the specified TARGETS and their
+ dependencies. 'c' is a synonym.
+ exit Exit SCons interactive mode.
+ help [COMMAND] Prints help for the specified COMMAND. 'h' and
+ '?' are synonyms.
+ shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and '!'
+ are synonyms.
+ version Prints SCons version information.
+ """
+
+ synonyms = {
+ 'b' : 'build',
+ 'c' : 'clean',
+ 'h' : 'help',
+ 'scons' : 'build',
+ 'sh' : 'shell',
+ }
+
+ def __init__(self, **kw):
+ cmd.Cmd.__init__(self)
+ for key, val in kw.items():
+ setattr(self, key, val)
+
+ if sys.platform == 'win32':
+ self.shell_variable = 'COMSPEC'
+ else:
+ self.shell_variable = 'SHELL'
+
+ def default(self, argv):
+ print "*** Unknown command: %s" % argv[0]
+
+ def onecmd(self, line):
+ line = string.strip(line)
+ if not line:
+ print self.lastcmd
+ return self.emptyline()
+ self.lastcmd = line
+ if line[0] == '!':
+ line = 'shell ' + line[1:]
+ elif line[0] == '?':
+ line = 'help ' + line[1:]
+ if os.sep == '\\':
+ line = string.replace(line, '\\', '\\\\')
+ argv = shlex.split(line)
+ argv[0] = self.synonyms.get(argv[0], argv[0])
+ if not argv[0]:
+ return self.default(line)
+ else:
+ try:
+ func = getattr(self, 'do_' + argv[0])
+ except AttributeError:
+ return self.default(argv)
+ return func(argv)
+
+ def do_build(self, argv):
+ """\
+ build [TARGETS] Build the specified TARGETS and their
+ dependencies. 'b' is a synonym.
+ """
+ import SCons.Node
+ import SCons.SConsign
+ import SCons.Script.Main
+
+ options = copy.deepcopy(self.options)
+
+ options, targets = self.parser.parse_args(argv[1:], values=options)
+
+ SCons.Script.COMMAND_LINE_TARGETS = targets
+
+ if targets:
+ SCons.Script.BUILD_TARGETS = targets
+ else:
+ # If the user didn't specify any targets on the command line,
+ # use the list of default targets.
+ SCons.Script.BUILD_TARGETS = SCons.Script._build_plus_default
+
+ nodes = SCons.Script.Main._build_targets(self.fs,
+ options,
+ targets,
+ self.target_top)
+
+ if not nodes:
+ return
+
+ # Call each of the Node's alter_targets() methods, which may
+ # provide additional targets that ended up as part of the build
+ # (the canonical example being a VariantDir() when we're building
+ # from a source directory) and whose state we therefore
+ # need to clear, too.
+ x = []
+ for n in nodes:
+ x.extend(n.alter_targets()[0])
+ nodes.extend(x)
+
+ # Clean up so that we can perform the next build correctly.
+ #
+ # We do this by walking over all the children of the targets,
+ # and clearing their state.
+ #
+ # We currently have to re-scan each node to find their
+ # children, because built nodes have already been partially
+ # cleared and don't remember their children. (In scons
+ # 0.96.1 and earlier, this wasn't the case, and we didn't
+ # have to re-scan the nodes.)
+ #
+ # Because we have to re-scan each node, we can't clear the
+ # nodes as we walk over them, because we may end up rescanning
+ # a cleared node as we scan a later node. Therefore, only
+ # store the list of nodes that need to be cleared as we walk
+ # the tree, and clear them in a separate pass.
+ #
+ # XXX: Someone more familiar with the inner workings of scons
+ # may be able to point out a more efficient way to do this.
+
+ SCons.Script.Main.progress_display("scons: Clearing cached node information ...")
+
+ seen_nodes = {}
+
+ def get_unseen_children(node, parent, seen_nodes=seen_nodes):
+ def is_unseen(node, seen_nodes=seen_nodes):
+ return not seen_nodes.has_key(node)
+ return filter(is_unseen, node.children(scan=1))
+
+ def add_to_seen_nodes(node, parent, seen_nodes=seen_nodes):
+ seen_nodes[node] = 1
+
+ # If this file is in a VariantDir and has a
+ # corresponding source file in the source tree, remember the
+ # node in the source tree, too. This is needed in
+ # particular to clear cached implicit dependencies on the
+ # source file, since the scanner will scan it if the
+ # VariantDir was created with duplicate=0.
+ try:
+ rfile_method = node.rfile
+ except AttributeError:
+ return
+ else:
+ rfile = rfile_method()
+ if rfile != node:
+ seen_nodes[rfile] = 1
+
+ for node in nodes:
+ walker = SCons.Node.Walker(node,
+ kids_func=get_unseen_children,
+ eval_func=add_to_seen_nodes)
+ n = walker.next()
+ while n:
+ n = walker.next()
+
+ for node in seen_nodes.keys():
+ # Call node.clear() to clear most of the state
+ node.clear()
+ # node.clear() doesn't reset node.state, so call
+ # node.set_state() to reset it manually
+ node.set_state(SCons.Node.no_state)
+ node.implicit = None
+
+ # Debug: Uncomment to verify that all Taskmaster reference
+ # counts have been reset to zero.
+ #if node.ref_count != 0:
+ # from SCons.Debug import Trace
+ # Trace('node %s, ref_count %s !!!\n' % (node, node.ref_count))
+
+ SCons.SConsign.Reset()
+ SCons.Script.Main.progress_display("scons: done clearing node information.")
+
+ def do_clean(self, argv):
+ """\
+ clean [TARGETS] Clean (remove) the specified TARGETS
+ and their dependencies. 'c' is a synonym.
+ """
+ return self.do_build(['build', '--clean'] + argv[1:])
+
+ def do_EOF(self, argv):
+ print
+ self.do_exit(argv)
+
+ def _do_one_help(self, arg):
+ try:
+ # If help_<arg>() exists, then call it.
+ func = getattr(self, 'help_' + arg)
+ except AttributeError:
+ try:
+ func = getattr(self, 'do_' + arg)
+ except AttributeError:
+ doc = None
+ else:
+ doc = self._doc_to_help(func)
+ if doc:
+ sys.stdout.write(doc + '\n')
+ sys.stdout.flush()
+ else:
+ doc = self.strip_initial_spaces(func())
+ if doc:
+ sys.stdout.write(doc + '\n')
+ sys.stdout.flush()
+
+ def _doc_to_help(self, obj):
+ doc = obj.__doc__
+ if doc is None:
+ return ''
+ return self._strip_initial_spaces(doc)
+
+ def _strip_initial_spaces(self, s):
+ #lines = s.split('\n')
+ lines = string.split(s, '\n')
+ spaces = re.match(' *', lines[0]).group(0)
+ #def strip_spaces(l):
+ # if l.startswith(spaces):
+ # l = l[len(spaces):]
+ # return l
+ #return '\n'.join([ strip_spaces(l) for l in lines ])
+ def strip_spaces(l, spaces=spaces):
+ if l[:len(spaces)] == spaces:
+ l = l[len(spaces):]
+ return l
+ lines = map(strip_spaces, lines)
+ return string.join(lines, '\n')
+
+ def do_exit(self, argv):
+ """\
+ exit Exit SCons interactive mode.
+ """
+ sys.exit(0)
+
+ def do_help(self, argv):
+ """\
+ help [COMMAND] Prints help for the specified COMMAND. 'h'
+ and '?' are synonyms.
+ """
+ if argv[1:]:
+ for arg in argv[1:]:
+ if self._do_one_help(arg):
+ break
+ else:
+ # If bare 'help' is called, print this class's doc
+ # string (if it has one).
+ doc = self._doc_to_help(self.__class__)
+ if doc:
+ sys.stdout.write(doc + '\n')
+ sys.stdout.flush()
+
+ def do_shell(self, argv):
+ """\
+ shell [COMMANDLINE] Execute COMMANDLINE in a subshell. 'sh' and
+ '!' are synonyms.
+ """
+ import subprocess
+ argv = argv[1:]
+ if not argv:
+ argv = os.environ[self.shell_variable]
+ try:
+ p = subprocess.Popen(argv)
+ except EnvironmentError, e:
+ sys.stderr.write('scons: %s: %s\n' % (argv[0], e.strerror))
+ else:
+ p.wait()
+
+ def do_version(self, argv):
+ """\
+ version Prints SCons version information.
+ """
+ sys.stdout.write(self.parser.version + '\n')
+
+def interact(fs, parser, options, targets, target_top):
+ c = SConsInteractiveCmd(prompt = 'scons>>> ',
+ fs = fs,
+ parser = parser,
+ options = options,
+ targets = targets,
+ target_top = target_top)
+ c.cmdloop()
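The dispatch pattern used by SConsInteractiveCmd above (synonym expansion followed by a getattr() lookup of a do_* method), shown in isolation with a toy command set:

    import cmd

    class Toy(cmd.Cmd):
        synonyms = {'b': 'build'}

        def onecmd(self, line):
            argv = line.split()
            # Expand abbreviations, then look up the handler by name.
            argv[0] = self.synonyms.get(argv[0], argv[0])
            func = getattr(self, 'do_' + argv[0], None)
            if func is None:
                print("*** Unknown command: %s" % argv[0])
                return
            return func(argv)

        def do_build(self, argv):
            print("building %s" % (argv[1:] or ['.']))

    Toy().onecmd("b app")    # prints: building ['app']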
diff --git a/tools/scons/scons-local-1.2.0/SCons/Script/Main.py b/tools/scons/scons-local-1.2.0/SCons/Script/Main.py
new file mode 100644
index 000000000..5624038c0
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Script/Main.py
@@ -0,0 +1,1321 @@
+"""SCons.Script
+
+This file implements the main() function used by the scons script.
+
+Architecturally, this *is* the scons script, and will likely only be
+called from the external "scons" wrapper. Consequently, anything here
+should not be, or be considered, part of the build engine. If it's
+something that we expect other software to want to use, it should go in
+some other module. If it's specific to the "scons" script invocation,
+it goes here.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Script/Main.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+import string
+import sys
+import time
+import traceback
+
+# Strip the script directory from sys.path so that on case-insensitive
+# (Windows) systems Python doesn't think that the "scons" script is the
+# "SCons" package. Replace it with our own version directory so,
+# if they're there, we pick up the right version of the build engine
+# modules.
+#sys.path = [os.path.join(sys.prefix,
+# 'lib',
+# 'scons-%d' % SCons.__version__)] + sys.path[1:]
+
+import SCons.CacheDir
+import SCons.Debug
+import SCons.Defaults
+import SCons.Environment
+import SCons.Errors
+import SCons.Job
+import SCons.Node
+import SCons.Node.FS
+import SCons.SConf
+import SCons.Script
+import SCons.Taskmaster
+import SCons.Util
+import SCons.Warnings
+
+import SCons.Script.Interactive
+
+def fetch_win32_parallel_msg():
+ # A subsidiary function that exists solely to isolate this import
+ # so we don't have to pull it in on all platforms, and so that an
+ # in-line "import" statement in the _main() function below doesn't
+ # cause warnings about local names shadowing use of the 'SCons'
+ # global in nested scopes, UnboundLocalErrors, and the like in some
+ # versions (2.1) of Python.
+ import SCons.Platform.win32
+ return SCons.Platform.win32.parallel_msg
+
+#
+
+class SConsPrintHelpException(Exception):
+ pass
+
+display = SCons.Util.display
+progress_display = SCons.Util.DisplayEngine()
+
+first_command_start = None
+last_command_end = None
+
+class Progressor:
+ prev = ''
+ count = 0
+ target_string = '$TARGET'
+
+ def __init__(self, obj, interval=1, file=None, overwrite=False):
+ if file is None:
+ file = sys.stdout
+
+ self.obj = obj
+ self.file = file
+ self.interval = interval
+ self.overwrite = overwrite
+
+ if callable(obj):
+ self.func = obj
+ elif SCons.Util.is_List(obj):
+ self.func = self.spinner
+ elif string.find(obj, self.target_string) != -1:
+ self.func = self.replace_string
+ else:
+ self.func = self.string
+
+ def write(self, s):
+ self.file.write(s)
+ self.file.flush()
+ self.prev = s
+
+ def erase_previous(self):
+ if self.prev:
+ length = len(self.prev)
+ if self.prev[-1] in ('\n', '\r'):
+ length = length - 1
+ self.write(' ' * length + '\r')
+ self.prev = ''
+
+ def spinner(self, node):
+ self.write(self.obj[self.count % len(self.obj)])
+
+ def string(self, node):
+ self.write(self.obj)
+
+ def replace_string(self, node):
+ self.write(string.replace(self.obj, self.target_string, str(node)))
+
+ def __call__(self, node):
+ self.count = self.count + 1
+ if (self.count % self.interval) == 0:
+ if self.overwrite:
+ self.erase_previous()
+ self.func(node)
+
+ProgressObject = SCons.Util.Null()
+
+def Progress(*args, **kw):
+ global ProgressObject
+ ProgressObject = apply(Progressor, args, kw)
+
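+# Illustrative usage (a sketch, not part of the engine code itself): the
+# Progress() function above is normally called from an SConscript file.
+# Passing a list selects the spinner behaviour, a string containing
+# $TARGET selects replace_string(), and a callable is used directly, e.g.:
+#
+#     Progress(['-\r', '\\\r', '|\r', '/\r'], interval=5)
+#     Progress('Evaluating $TARGET\r', overwrite=True)
+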
+# Task control.
+#
+
+_BuildFailures = []
+
+def GetBuildFailures():
+ return _BuildFailures
+
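+# Illustrative usage (an assumption about typical SConscript code, not part
+# of this file): the list returned by GetBuildFailures() holds the
+# BuildError objects recorded by BuildTask.do_failed() below, so a build
+# summary can be printed at exit, e.g.:
+#
+#     import atexit
+#     def print_build_failures():
+#         for bf in GetBuildFailures():
+#             print "%s failed: %s" % (bf.node, bf.errstr)
+#     atexit.register(print_build_failures)
+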
+class BuildTask(SCons.Taskmaster.Task):
+ """An SCons build task."""
+ progress = ProgressObject
+
+ def display(self, message):
+ display('scons: ' + message)
+
+ def prepare(self):
+ self.progress(self.targets[0])
+ return SCons.Taskmaster.Task.prepare(self)
+
+ def needs_execute(self):
+ target = self.targets[0]
+ if target.get_state() == SCons.Node.executing:
+ return True
+ else:
+ if self.top and target.has_builder():
+ display("scons: `%s' is up to date." % str(self.node))
+ return False
+
+ def execute(self):
+ if print_time:
+ start_time = time.time()
+ global first_command_start
+ if first_command_start is None:
+ first_command_start = start_time
+ SCons.Taskmaster.Task.execute(self)
+ if print_time:
+ global cumulative_command_time
+ global last_command_end
+ finish_time = time.time()
+ last_command_end = finish_time
+ cumulative_command_time = cumulative_command_time+finish_time-start_time
+ sys.stdout.write("Command execution time: %f seconds\n"%(finish_time-start_time))
+
+ def do_failed(self, status=2):
+ _BuildFailures.append(self.exception[1])
+ global exit_status
+ global this_build_status
+ if self.options.ignore_errors:
+ SCons.Taskmaster.Task.executed(self)
+ elif self.options.keep_going:
+ SCons.Taskmaster.Task.fail_continue(self)
+ exit_status = status
+ this_build_status = status
+ else:
+ SCons.Taskmaster.Task.fail_stop(self)
+ exit_status = status
+ this_build_status = status
+
+ def executed(self):
+ t = self.targets[0]
+ if self.top and not t.has_builder() and not t.side_effect:
+ if not t.exists():
+ errstr="Do not know how to make target `%s'." % t
+ sys.stderr.write("scons: *** " + errstr)
+ if not self.options.keep_going:
+ sys.stderr.write(" Stop.")
+ sys.stderr.write("\n")
+ try:
+ raise SCons.Errors.BuildError(t, errstr)
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.exception_set()
+ self.do_failed()
+ else:
+ print "scons: Nothing to be done for `%s'." % t
+ SCons.Taskmaster.Task.executed(self)
+ else:
+ SCons.Taskmaster.Task.executed(self)
+
+ def failed(self):
+ # Handle the failure of a build task. The primary purpose here
+ # is to display the various types of Errors and Exceptions
+ # appropriately.
+ exc_info = self.exc_info()
+ try:
+ t, e, tb = exc_info
+ except ValueError:
+ t, e = exc_info
+ tb = None
+
+ if t is None:
+ # The Taskmaster didn't record an exception for this Task;
+ # see if the sys module has one.
+ try:
+ t, e, tb = sys.exc_info()[:]
+ except ValueError:
+ t, e = exc_info
+ tb = None
+
+ # Deprecated string exceptions will have their string stored
+ # in the first entry of the tuple.
+ if e is None:
+ e = t
+
+ buildError = SCons.Errors.convert_to_BuildError(e)
+ if not buildError.node:
+ buildError.node = self.node
+
+ node = buildError.node
+ if not SCons.Util.is_List(node):
+ node = [ node ]
+ nodename = string.join(map(str, node), ', ')
+
+ errfmt = "scons: *** [%s] %s\n"
+ sys.stderr.write(errfmt % (nodename, buildError))
+
+ if (buildError.exc_info[2] and buildError.exc_info[1] and
+ # TODO(1.5)
+ #not isinstance(
+ # buildError.exc_info[1],
+ # (EnvironmentError, SCons.Errors.StopError, SCons.Errors.UserError))):
+ not isinstance(buildError.exc_info[1], EnvironmentError) and
+ not isinstance(buildError.exc_info[1], SCons.Errors.StopError) and
+ not isinstance(buildError.exc_info[1], SCons.Errors.UserError)):
+ type, value, trace = buildError.exc_info
+ traceback.print_exception(type, value, trace)
+ elif tb and print_stacktrace:
+ sys.stderr.write("scons: internal stack trace:\n")
+ traceback.print_tb(tb, file=sys.stderr)
+
+ self.exception = (e, buildError, tb) # type, value, traceback
+ self.do_failed(buildError.exitstatus)
+
+ self.exc_clear()
+
+ def postprocess(self):
+ if self.top:
+ t = self.targets[0]
+ for tp in self.options.tree_printers:
+ tp.display(t)
+ if self.options.debug_includes:
+ tree = t.render_include_tree()
+ if tree:
+ print
+ print tree
+ SCons.Taskmaster.Task.postprocess(self)
+
+ def make_ready(self):
+ """Make a task ready for execution"""
+ SCons.Taskmaster.Task.make_ready(self)
+ if self.out_of_date and self.options.debug_explain:
+ explanation = self.out_of_date[0].explain()
+ if explanation:
+ sys.stdout.write("scons: " + explanation)
+
+class CleanTask(SCons.Taskmaster.Task):
+ """An SCons clean task."""
+ def fs_delete(self, path, pathstr, remove=1):
+ try:
+ if os.path.exists(path):
+ if os.path.isfile(path):
+ if remove: os.unlink(path)
+ display("Removed " + pathstr)
+ elif os.path.isdir(path) and not os.path.islink(path):
+ # delete everything in the dir
+ entries = os.listdir(path)
+                    # Sort for deterministic output (os.listdir() can
+ # return entries in a random order).
+ entries.sort()
+ for e in entries:
+ p = os.path.join(path, e)
+ s = os.path.join(pathstr, e)
+ if os.path.isfile(p):
+ if remove: os.unlink(p)
+ display("Removed " + s)
+ else:
+ self.fs_delete(p, s, remove)
+ # then delete dir itself
+ if remove: os.rmdir(path)
+ display("Removed directory " + pathstr)
+ except (IOError, OSError), e:
+ print "scons: Could not remove '%s':" % pathstr, e.strerror
+
+ def show(self):
+ target = self.targets[0]
+ if (target.has_builder() or target.side_effect) and not target.noclean:
+ for t in self.targets:
+ if not t.isdir():
+ display("Removed " + str(t))
+ if SCons.Environment.CleanTargets.has_key(target):
+ files = SCons.Environment.CleanTargets[target]
+ for f in files:
+ self.fs_delete(f.abspath, str(f), 0)
+
+ def remove(self):
+ target = self.targets[0]
+ if (target.has_builder() or target.side_effect) and not target.noclean:
+ for t in self.targets:
+ try:
+ removed = t.remove()
+ except OSError, e:
+ # An OSError may indicate something like a permissions
+ # issue, an IOError would indicate something like
+ # the file not existing. In either case, print a
+ # message and keep going to try to remove as many
+                    # targets as possible.
+ print "scons: Could not remove '%s':" % str(t), e.strerror
+ else:
+ if removed:
+ display("Removed " + str(t))
+ if SCons.Environment.CleanTargets.has_key(target):
+ files = SCons.Environment.CleanTargets[target]
+ for f in files:
+ self.fs_delete(f.abspath, str(f))
+
+ execute = remove
+
+ # We want the Taskmaster to update the Node states (and therefore
+ # handle reference counts, etc.), but we don't want to call
+ # back to the Node's post-build methods, which would do things
+ # we don't want, like store .sconsign information.
+ executed = SCons.Taskmaster.Task.executed_without_callbacks
+
+ # Have the taskmaster arrange to "execute" all of the targets, because
+ # we'll figure out ourselves (in remove() or show() above) whether
+ # anything really needs to be done.
+ make_ready = SCons.Taskmaster.Task.make_ready_all
+
+ def prepare(self):
+ pass
+
+class QuestionTask(SCons.Taskmaster.Task):
+ """An SCons task for the -q (question) option."""
+ def prepare(self):
+ pass
+
+ def execute(self):
+ if self.targets[0].get_state() != SCons.Node.up_to_date or \
+ (self.top and not self.targets[0].exists()):
+ global exit_status
+ global this_build_status
+ exit_status = 1
+ this_build_status = 1
+ self.tm.stop()
+
+ def executed(self):
+ pass
+
+
+class TreePrinter:
+ def __init__(self, derived=False, prune=False, status=False):
+ self.derived = derived
+ self.prune = prune
+ self.status = status
+ def get_all_children(self, node):
+ return node.all_children()
+ def get_derived_children(self, node):
+ children = node.all_children(None)
+ return filter(lambda x: x.has_builder(), children)
+ def display(self, t):
+ if self.derived:
+ func = self.get_derived_children
+ else:
+ func = self.get_all_children
+ s = self.status and 2 or 0
+ SCons.Util.print_tree(t, func, prune=self.prune, showtags=s)
+
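+# Illustrative note (derived from _set_debug_values() below, not new
+# behaviour): --debug=tree creates TreePrinter(), --debug=dtree creates
+# TreePrinter(derived=True), and --debug=stree creates
+# TreePrinter(status=True); each printer's display() is then invoked per
+# top-level target in BuildTask.postprocess() above, e.g.:
+#
+#     tp = TreePrinter(derived=True, prune=True)
+#     tp.display(node)    # prints only children that have a builder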
+
+def python_version_string():
+ return string.split(sys.version)[0]
+
+def python_version_unsupported(version=sys.version_info):
+ return version < (1, 5, 2)
+
+def python_version_deprecated(version=sys.version_info):
+ return version < (2, 2, 0)
+
+
+# Global variables
+
+print_objects = 0
+print_memoizer = 0
+print_stacktrace = 0
+print_time = 0
+sconscript_time = 0
+cumulative_command_time = 0
+exit_status = 0 # final exit status, assume success by default
+this_build_status = 0 # "exit status" of an individual build
+num_jobs = None
+delayed_warnings = []
+
+class FakeOptionParser:
+ """
+ A do-nothing option parser, used for the initial OptionsParser variable.
+
+ During normal SCons operation, the OptionsParser is created right
+    away by the main() function. Certain test scripts, however, can
+ introspect on different Tool modules, the initialization of which
+ can try to add a new, local option to an otherwise uninitialized
+ OptionsParser object. This allows that introspection to happen
+ without blowing up.
+
+ """
+ class FakeOptionValues:
+ def __getattr__(self, attr):
+ return None
+ values = FakeOptionValues()
+ def add_local_option(self, *args, **kw):
+ pass
+
+OptionsParser = FakeOptionParser()
+
+def AddOption(*args, **kw):
+ if not kw.has_key('default'):
+ kw['default'] = None
+ result = apply(OptionsParser.add_local_option, args, kw)
+ return result
+
+def GetOption(name):
+ return getattr(OptionsParser.values, name)
+
+def SetOption(name, value):
+ return OptionsParser.values.set_option(name, value)
+
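+# Illustrative usage (a sketch of the SConscript-level API these helpers
+# back; the option name and values are made up):
+#
+#     AddOption('--prefix', dest='prefix', nargs=1, type='string',
+#               action='store', default='/usr/local',
+#               help='installation prefix')
+#     prefix = GetOption('prefix')
+#     SetOption('num_jobs', 4)
+#
+# AddOption() forwards to OptionsParser.add_local_option() once main()
+# below has installed the real parser in place of FakeOptionParser.
+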
+#
+class Stats:
+ def __init__(self):
+ self.stats = []
+ self.labels = []
+ self.append = self.do_nothing
+ self.print_stats = self.do_nothing
+ def enable(self, outfp):
+ self.outfp = outfp
+ self.append = self.do_append
+ self.print_stats = self.do_print
+ def do_nothing(self, *args, **kw):
+ pass
+
+class CountStats(Stats):
+ def do_append(self, label):
+ self.labels.append(label)
+ self.stats.append(SCons.Debug.fetchLoggedInstances())
+ def do_print(self):
+ stats_table = {}
+ for s in self.stats:
+ for n in map(lambda t: t[0], s):
+ stats_table[n] = [0, 0, 0, 0]
+ i = 0
+ for s in self.stats:
+ for n, c in s:
+ stats_table[n][i] = c
+ i = i + 1
+ keys = stats_table.keys()
+ keys.sort()
+ self.outfp.write("Object counts:\n")
+ pre = [" "]
+ post = [" %s\n"]
+ l = len(self.stats)
+ fmt1 = string.join(pre + [' %7s']*l + post, '')
+ fmt2 = string.join(pre + [' %7d']*l + post, '')
+ labels = self.labels[:l]
+ labels.append(("", "Class"))
+ self.outfp.write(fmt1 % tuple(map(lambda x: x[0], labels)))
+ self.outfp.write(fmt1 % tuple(map(lambda x: x[1], labels)))
+ for k in keys:
+ r = stats_table[k][:l] + [k]
+ self.outfp.write(fmt2 % tuple(r))
+
+count_stats = CountStats()
+
+class MemStats(Stats):
+ def do_append(self, label):
+ self.labels.append(label)
+ self.stats.append(SCons.Debug.memory())
+ def do_print(self):
+ fmt = 'Memory %-32s %12d\n'
+ for label, stats in map(None, self.labels, self.stats):
+ self.outfp.write(fmt % (label, stats))
+
+memory_stats = MemStats()
+
+# utility functions
+
+def _scons_syntax_error(e):
+ """Handle syntax errors. Print out a message and show where the error
+ occurred.
+ """
+ etype, value, tb = sys.exc_info()
+ lines = traceback.format_exception_only(etype, value)
+ for line in lines:
+ sys.stderr.write(line+'\n')
+ sys.exit(2)
+
+def find_deepest_user_frame(tb):
+ """
+ Find the deepest stack frame that is not part of SCons.
+
+ Input is a "pre-processed" stack trace in the form
+ returned by traceback.extract_tb() or traceback.extract_stack()
+ """
+
+ tb.reverse()
+
+ # find the deepest traceback frame that is not part
+ # of SCons:
+ for frame in tb:
+ filename = frame[0]
+ if string.find(filename, os.sep+'SCons'+os.sep) == -1:
+ return frame
+ return tb[0]
+
+def _scons_user_error(e):
+ """Handle user errors. Print out a message and a description of the
+    error, along with the line number and routine where it occurred.
+ The file and line number will be the deepest stack frame that is
+ not part of SCons itself.
+ """
+ global print_stacktrace
+ etype, value, tb = sys.exc_info()
+ if print_stacktrace:
+ traceback.print_exception(etype, value, tb)
+ filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb))
+ sys.stderr.write("\nscons: *** %s\n" % value)
+ sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
+ sys.exit(2)
+
+def _scons_user_warning(e):
+ """Handle user warnings. Print out a message and a description of
+    the warning, along with the line number and routine where it occurred.
+ The file and line number will be the deepest stack frame that is
+ not part of SCons itself.
+ """
+ etype, value, tb = sys.exc_info()
+ filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_tb(tb))
+ sys.stderr.write("\nscons: warning: %s\n" % e)
+ sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
+
+def _scons_internal_warning(e):
+ """Slightly different from _scons_user_warning in that we use the
+ *current call stack* rather than sys.exc_info() to get our stack trace.
+ This is used by the warnings framework to print warnings."""
+ filename, lineno, routine, dummy = find_deepest_user_frame(traceback.extract_stack())
+ sys.stderr.write("\nscons: warning: %s\n" % e[0])
+ sys.stderr.write('File "%s", line %d, in %s\n' % (filename, lineno, routine))
+
+def _scons_internal_error():
+ """Handle all errors but user errors. Print out a message telling
+ the user what to do in this case and print a normal trace.
+ """
+ print 'internal error'
+ traceback.print_exc()
+ sys.exit(2)
+
+def _SConstruct_exists(dirname='', repositories=[], filelist=None):
+ """This function checks that an SConstruct file exists in a directory.
+ If so, it returns the path of the file. By default, it checks the
+ current directory.
+ """
+ if not filelist:
+ filelist = ['SConstruct', 'Sconstruct', 'sconstruct']
+ for file in filelist:
+ sfile = os.path.join(dirname, file)
+ if os.path.isfile(sfile):
+ return sfile
+ if not os.path.isabs(sfile):
+ for rep in repositories:
+ if os.path.isfile(os.path.join(rep, sfile)):
+ return sfile
+ return None
+
+def _set_debug_values(options):
+ global print_memoizer, print_objects, print_stacktrace, print_time
+
+ debug_values = options.debug
+
+ if "count" in debug_values:
+ # All of the object counts are within "if __debug__:" blocks,
+ # which get stripped when running optimized (with python -O or
+ # from compiled *.pyo files). Provide a warning if __debug__ is
+ # stripped, so it doesn't just look like --debug=count is broken.
+ enable_count = False
+ if __debug__: enable_count = True
+ if enable_count:
+ count_stats.enable(sys.stdout)
+ else:
+ msg = "--debug=count is not supported when running SCons\n" + \
+ "\twith the python -O option or optimized (.pyo) modules."
+ SCons.Warnings.warn(SCons.Warnings.NoObjectCountWarning, msg)
+ if "dtree" in debug_values:
+ options.tree_printers.append(TreePrinter(derived=True))
+ options.debug_explain = ("explain" in debug_values)
+ if "findlibs" in debug_values:
+ SCons.Scanner.Prog.print_find_libs = "findlibs"
+ options.debug_includes = ("includes" in debug_values)
+ print_memoizer = ("memoizer" in debug_values)
+ if "memory" in debug_values:
+ memory_stats.enable(sys.stdout)
+ print_objects = ("objects" in debug_values)
+ if "presub" in debug_values:
+ SCons.Action.print_actions_presub = 1
+ if "stacktrace" in debug_values:
+ print_stacktrace = 1
+ if "stree" in debug_values:
+ options.tree_printers.append(TreePrinter(status=True))
+ if "time" in debug_values:
+ print_time = 1
+ if "tree" in debug_values:
+ options.tree_printers.append(TreePrinter())
+
+def _create_path(plist):
+ path = '.'
+ for d in plist:
+ if os.path.isabs(d):
+ path = d
+ else:
+ path = path + '/' + d
+ return path
+
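+# Illustrative behaviour (a sketch): relative components accumulate while
+# an absolute component resets the path, e.g.:
+#
+#     _create_path(['sub', 'dir'])      ->  './sub/dir'
+#     _create_path(['/tmp', 'build'])   ->  '/tmp/build'
+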
+def _load_site_scons_dir(topdir, site_dir_name=None):
+ """Load the site_scons dir under topdir.
+ Adds site_scons to sys.path, imports site_scons/site_init.py,
+ and adds site_scons/site_tools to default toolpath."""
+ if site_dir_name:
+ err_if_not_found = True # user specified: err if missing
+ else:
+ site_dir_name = "site_scons"
+ err_if_not_found = False
+
+ site_dir = os.path.join(topdir.path, site_dir_name)
+ if not os.path.exists(site_dir):
+ if err_if_not_found:
+ raise SCons.Errors.UserError, "site dir %s not found."%site_dir
+ return
+
+ site_init_filename = "site_init.py"
+ site_init_modname = "site_init"
+ site_tools_dirname = "site_tools"
+ sys.path = [os.path.abspath(site_dir)] + sys.path
+ site_init_file = os.path.join(site_dir, site_init_filename)
+ site_tools_dir = os.path.join(site_dir, site_tools_dirname)
+ if os.path.exists(site_init_file):
+ import imp
+ try:
+ fp, pathname, description = imp.find_module(site_init_modname,
+ [site_dir])
+ try:
+ imp.load_module(site_init_modname, fp, pathname, description)
+ finally:
+ if fp:
+ fp.close()
+ except ImportError, e:
+ sys.stderr.write("Can't import site init file '%s': %s\n"%(site_init_file, e))
+ raise
+ except Exception, e:
+ sys.stderr.write("Site init file '%s' raised exception: %s\n"%(site_init_file, e))
+ raise
+ if os.path.exists(site_tools_dir):
+ SCons.Tool.DefaultToolpath.append(os.path.abspath(site_tools_dir))
+
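+# Expected on-disk layout (illustrative sketch; my_tool.py is a made-up
+# name, only site_init.py and site_tools/ are significant here):
+#
+#     site_scons/
+#         site_init.py      # imported above via imp.load_module()
+#         site_tools/       # appended to SCons.Tool.DefaultToolpath
+#             my_tool.py
+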
+def version_string(label, module):
+ version = module.__version__
+ build = module.__build__
+ if build:
+ if build[0] != '.':
+ build = '.' + build
+ version = version + build
+ fmt = "\t%s: v%s, %s, by %s on %s\n"
+ return fmt % (label,
+ version,
+ module.__date__,
+ module.__developer__,
+ module.__buildsys__)
+
+def _main(parser):
+ global exit_status
+ global this_build_status
+
+ options = parser.values
+
+ # Here's where everything really happens.
+
+ # First order of business: set up default warnings and then
+ # handle the user's warning options, so that we can issue (or
+ # suppress) appropriate warnings about anything that might happen,
+ # as configured by the user.
+
+ default_warnings = [ SCons.Warnings.CorruptSConsignWarning,
+ SCons.Warnings.DeprecatedWarning,
+ SCons.Warnings.DuplicateEnvironmentWarning,
+ SCons.Warnings.FutureReservedVariableWarning,
+ SCons.Warnings.LinkWarning,
+ SCons.Warnings.MissingSConscriptWarning,
+ SCons.Warnings.NoMD5ModuleWarning,
+ SCons.Warnings.NoMetaclassSupportWarning,
+ SCons.Warnings.NoObjectCountWarning,
+ SCons.Warnings.NoParallelSupportWarning,
+ SCons.Warnings.MisleadingKeywordsWarning,
+ SCons.Warnings.ReservedVariableWarning,
+ SCons.Warnings.StackSizeWarning,
+ ]
+
+ for warning in default_warnings:
+ SCons.Warnings.enableWarningClass(warning)
+ SCons.Warnings._warningOut = _scons_internal_warning
+ SCons.Warnings.process_warn_strings(options.warn)
+
+ # Now that we have the warnings configuration set up, we can actually
+ # issue (or suppress) any warnings about warning-worthy things that
+ # occurred while the command-line options were getting parsed.
+ try:
+ dw = options.delayed_warnings
+ except AttributeError:
+ pass
+ else:
+ delayed_warnings.extend(dw)
+ for warning_type, message in delayed_warnings:
+ SCons.Warnings.warn(warning_type, message)
+
+ if options.diskcheck:
+ SCons.Node.FS.set_diskcheck(options.diskcheck)
+
+ # Next, we want to create the FS object that represents the outside
+ # world's file system, as that's central to a lot of initialization.
+ # To do this, however, we need to be in the directory from which we
+ # want to start everything, which means first handling any relevant
+ # options that might cause us to chdir somewhere (-C, -D, -U, -u).
+ if options.directory:
+ cdir = _create_path(options.directory)
+ try:
+ os.chdir(cdir)
+ except OSError:
+ sys.stderr.write("Could not change directory to %s\n" % cdir)
+
+ target_top = None
+ if options.climb_up:
+ target_top = '.' # directory to prepend to targets
+ script_dir = os.getcwd() # location of script
+ while script_dir and not _SConstruct_exists(script_dir,
+ options.repository,
+ options.file):
+ script_dir, last_part = os.path.split(script_dir)
+ if last_part:
+ target_top = os.path.join(last_part, target_top)
+ else:
+ script_dir = ''
+ if script_dir and script_dir != os.getcwd():
+ display("scons: Entering directory `%s'" % script_dir)
+ os.chdir(script_dir)
+
+ # Now that we're in the top-level SConstruct directory, go ahead
+ # and initialize the FS object that represents the file system,
+ # and make it the build engine default.
+ fs = SCons.Node.FS.get_default_fs()
+
+ for rep in options.repository:
+ fs.Repository(rep)
+
+ # Now that we have the FS object, the next order of business is to
+ # check for an SConstruct file (or other specified config file).
+ # If there isn't one, we can bail before doing any more work.
+ scripts = []
+ if options.file:
+ scripts.extend(options.file)
+ if not scripts:
+ sfile = _SConstruct_exists(repositories=options.repository,
+ filelist=options.file)
+ if sfile:
+ scripts.append(sfile)
+
+ if not scripts:
+ if options.help:
+ # There's no SConstruct, but they specified -h.
+ # Give them the options usage now, before we fail
+ # trying to read a non-existent SConstruct file.
+ raise SConsPrintHelpException
+ raise SCons.Errors.UserError, "No SConstruct file found."
+
+ if scripts[0] == "-":
+ d = fs.getcwd()
+ else:
+ d = fs.File(scripts[0]).dir
+ fs.set_SConstruct_dir(d)
+
+ _set_debug_values(options)
+ SCons.Node.implicit_cache = options.implicit_cache
+ SCons.Node.implicit_deps_changed = options.implicit_deps_changed
+ SCons.Node.implicit_deps_unchanged = options.implicit_deps_unchanged
+
+ if options.no_exec:
+ SCons.SConf.dryrun = 1
+ SCons.Action.execute_actions = None
+ if options.question:
+ SCons.SConf.dryrun = 1
+ if options.clean:
+ SCons.SConf.SetBuildType('clean')
+ if options.help:
+ SCons.SConf.SetBuildType('help')
+ SCons.SConf.SetCacheMode(options.config)
+ SCons.SConf.SetProgressDisplay(progress_display)
+
+ if options.no_progress or options.silent:
+ progress_display.set_mode(0)
+
+ if options.site_dir:
+ _load_site_scons_dir(d, options.site_dir)
+ elif not options.no_site_dir:
+ _load_site_scons_dir(d)
+
+ if options.include_dir:
+ sys.path = options.include_dir + sys.path
+
+ # That should cover (most of) the options. Next, set up the variables
+ # that hold command-line arguments, so the SConscript files that we
+ # read and execute have access to them.
+ targets = []
+ xmit_args = []
+ for a in parser.largs:
+ if a[0] == '-':
+ continue
+ if '=' in a:
+ xmit_args.append(a)
+ else:
+ targets.append(a)
+ SCons.Script._Add_Targets(targets + parser.rargs)
+ SCons.Script._Add_Arguments(xmit_args)
+
+ # If stdout is not a tty, replace it with a wrapper object to call flush
+ # after every write.
+ #
+ # Tty devices automatically flush after every newline, so the replacement
+ # isn't necessary. Furthermore, if we replace sys.stdout, the readline
+ # module will no longer work. This affects the behavior during
+ # --interactive mode. --interactive should only be used when stdin and
+ # stdout refer to a tty.
+ if not sys.stdout.isatty():
+ sys.stdout = SCons.Util.Unbuffered(sys.stdout)
+ if not sys.stderr.isatty():
+ sys.stderr = SCons.Util.Unbuffered(sys.stderr)
+
+ memory_stats.append('before reading SConscript files:')
+ count_stats.append(('pre-', 'read'))
+
+ # And here's where we (finally) read the SConscript files.
+
+ progress_display("scons: Reading SConscript files ...")
+
+ start_time = time.time()
+ try:
+ for script in scripts:
+ SCons.Script._SConscript._SConscript(fs, script)
+ except SCons.Errors.StopError, e:
+ # We had problems reading an SConscript file, such as it
+ # couldn't be copied in to the VariantDir. Since we're just
+ # reading SConscript files and haven't started building
+ # things yet, stop regardless of whether they used -i or -k
+ # or anything else.
+ sys.stderr.write("scons: *** %s Stop.\n" % e)
+ exit_status = 2
+ sys.exit(exit_status)
+ global sconscript_time
+ sconscript_time = time.time() - start_time
+
+ progress_display("scons: done reading SConscript files.")
+
+ memory_stats.append('after reading SConscript files:')
+ count_stats.append(('post-', 'read'))
+
+ # Re-{enable,disable} warnings in case they disabled some in
+ # the SConscript file.
+ #
+ # We delay enabling the PythonVersionWarning class until here so that,
+    # if they explicitly disabled it either on the command line or in
+ # $SCONSFLAGS, or in the SConscript file, then the search through
+ # the list of deprecated warning classes will find that disabling
+ # first and not issue the warning.
+ SCons.Warnings.enableWarningClass(SCons.Warnings.PythonVersionWarning)
+ SCons.Warnings.process_warn_strings(options.warn)
+
+ # Now that we've read the SConscript files, we can check for the
+ # warning about deprecated Python versions--delayed until here
+ # in case they disabled the warning in the SConscript files.
+ if python_version_deprecated():
+ msg = "Support for pre-2.2 Python (%s) is deprecated.\n" + \
+ " If this will cause hardship, contact dev@scons.tigris.org."
+ SCons.Warnings.warn(SCons.Warnings.PythonVersionWarning,
+ msg % python_version_string())
+
+ if not options.help:
+ SCons.SConf.CreateConfigHBuilder(SCons.Defaults.DefaultEnvironment())
+
+ # Now re-parse the command-line options (any to the left of a '--'
+ # argument, that is) with any user-defined command-line options that
+ # the SConscript files may have added to the parser object. This will
+ # emit the appropriate error message and exit if any unknown option
+ # was specified on the command line.
+
+ parser.preserve_unknown_options = False
+ parser.parse_args(parser.largs, options)
+
+ if options.help:
+ help_text = SCons.Script.help_text
+ if help_text is None:
+ # They specified -h, but there was no Help() inside the
+ # SConscript files. Give them the options usage.
+ raise SConsPrintHelpException
+ else:
+ print help_text
+ print "Use scons -H for help about command-line options."
+ exit_status = 0
+ return
+
+ # Change directory to the top-level SConstruct directory, then tell
+ # the Node.FS subsystem that we're all done reading the SConscript
+ # files and calling Repository() and VariantDir() and changing
+ # directories and the like, so it can go ahead and start memoizing
+ # the string values of file system nodes.
+
+ fs.chdir(fs.Top)
+
+ SCons.Node.FS.save_strings(1)
+
+ # Now that we've read the SConscripts we can set the options
+ # that are SConscript settable:
+ SCons.Node.implicit_cache = options.implicit_cache
+ SCons.Node.FS.set_duplicate(options.duplicate)
+ fs.set_max_drift(options.max_drift)
+
+ SCons.Job.explicit_stack_size = options.stack_size
+
+ if options.md5_chunksize:
+ SCons.Node.FS.File.md5_chunksize = options.md5_chunksize
+
+ platform = SCons.Platform.platform_module()
+
+ if options.interactive:
+ SCons.Script.Interactive.interact(fs, OptionsParser, options,
+ targets, target_top)
+
+ else:
+
+ # Build the targets
+ nodes = _build_targets(fs, options, targets, target_top)
+ if not nodes:
+ exit_status = 2
+
+def _build_targets(fs, options, targets, target_top):
+
+ global this_build_status
+ this_build_status = 0
+
+ progress_display.set_mode(not (options.no_progress or options.silent))
+ display.set_mode(not options.silent)
+ SCons.Action.print_actions = not options.silent
+ SCons.Action.execute_actions = not options.no_exec
+ SCons.Node.FS.do_store_info = not options.no_exec
+ SCons.SConf.dryrun = options.no_exec
+
+ if options.diskcheck:
+ SCons.Node.FS.set_diskcheck(options.diskcheck)
+
+ SCons.CacheDir.cache_enabled = not options.cache_disable
+ SCons.CacheDir.cache_debug = options.cache_debug
+ SCons.CacheDir.cache_force = options.cache_force
+ SCons.CacheDir.cache_show = options.cache_show
+
+ if options.no_exec:
+ CleanTask.execute = CleanTask.show
+ else:
+ CleanTask.execute = CleanTask.remove
+
+ lookup_top = None
+ if targets or SCons.Script.BUILD_TARGETS != SCons.Script._build_plus_default:
+ # They specified targets on the command line or modified
+ # BUILD_TARGETS in the SConscript file(s), so if they used -u,
+ # -U or -D, we have to look up targets relative to the top,
+ # but we build whatever they specified.
+ if target_top:
+ lookup_top = fs.Dir(target_top)
+ target_top = None
+
+ targets = SCons.Script.BUILD_TARGETS
+ else:
+ # There are no targets specified on the command line,
+ # so if they used -u, -U or -D, we may have to restrict
+ # what actually gets built.
+ d = None
+ if target_top:
+ if options.climb_up == 1:
+ # -u, local directory and below
+ target_top = fs.Dir(target_top)
+ lookup_top = target_top
+ elif options.climb_up == 2:
+ # -D, all Default() targets
+ target_top = None
+ lookup_top = None
+ elif options.climb_up == 3:
+ # -U, local SConscript Default() targets
+ target_top = fs.Dir(target_top)
+ def check_dir(x, target_top=target_top):
+ if hasattr(x, 'cwd') and not x.cwd is None:
+ cwd = x.cwd.srcnode()
+ return cwd == target_top
+ else:
+ # x doesn't have a cwd, so it's either not a target,
+ # or not a file, so go ahead and keep it as a default
+ # target and let the engine sort it out:
+ return 1
+ d = filter(check_dir, SCons.Script.DEFAULT_TARGETS)
+ SCons.Script.DEFAULT_TARGETS[:] = d
+ target_top = None
+ lookup_top = None
+
+ targets = SCons.Script._Get_Default_Targets(d, fs)
+
+ if not targets:
+ sys.stderr.write("scons: *** No targets specified and no Default() targets found. Stop.\n")
+ return None
+
+ def Entry(x, ltop=lookup_top, ttop=target_top, fs=fs):
+ if isinstance(x, SCons.Node.Node):
+ node = x
+ else:
+ node = None
+ # Why would ltop be None? Unfortunately this happens.
+ if ltop == None: ltop = ''
+ # Curdir becomes important when SCons is called with -u, -C,
+ # or similar option that changes directory, and so the paths
+ # of targets given on the command line need to be adjusted.
+ curdir = os.path.join(os.getcwd(), str(ltop))
+ for lookup in SCons.Node.arg2nodes_lookups:
+ node = lookup(x, curdir=curdir)
+ if node != None:
+ break
+ if node is None:
+ node = fs.Entry(x, directory=ltop, create=1)
+ if ttop and not node.is_under(ttop):
+ if isinstance(node, SCons.Node.FS.Dir) and ttop.is_under(node):
+ node = ttop
+ else:
+ node = None
+ return node
+
+ nodes = filter(None, map(Entry, targets))
+
+ task_class = BuildTask # default action is to build targets
+ opening_message = "Building targets ..."
+ closing_message = "done building targets."
+ if options.keep_going:
+ failure_message = "done building targets (errors occurred during build)."
+ else:
+ failure_message = "building terminated because of errors."
+ if options.question:
+ task_class = QuestionTask
+ try:
+ if options.clean:
+ task_class = CleanTask
+ opening_message = "Cleaning targets ..."
+ closing_message = "done cleaning targets."
+ if options.keep_going:
+ failure_message = "done cleaning targets (errors occurred during clean)."
+ else:
+ failure_message = "cleaning terminated because of errors."
+ except AttributeError:
+ pass
+
+ task_class.progress = ProgressObject
+
+ if options.random:
+ def order(dependencies):
+ """Randomize the dependencies."""
+ import random
+ # This is cribbed from the implementation of
+ # random.shuffle() in Python 2.X.
+ d = dependencies
+ for i in xrange(len(d)-1, 0, -1):
+ j = int(random.random() * (i+1))
+ d[i], d[j] = d[j], d[i]
+ return d
+ else:
+ def order(dependencies):
+ """Leave the order of dependencies alone."""
+ return dependencies
+
+ if options.taskmastertrace_file == '-':
+ tmtrace = sys.stdout
+ elif options.taskmastertrace_file:
+ tmtrace = open(options.taskmastertrace_file, 'wb')
+ else:
+ tmtrace = None
+ taskmaster = SCons.Taskmaster.Taskmaster(nodes, task_class, order, tmtrace)
+
+ # Let the BuildTask objects get at the options to respond to the
+ # various print_* settings, tree_printer list, etc.
+ BuildTask.options = options
+
+ global num_jobs
+ num_jobs = options.num_jobs
+ jobs = SCons.Job.Jobs(num_jobs, taskmaster)
+ if num_jobs > 1:
+ msg = None
+ if jobs.num_jobs == 1:
+ msg = "parallel builds are unsupported by this version of Python;\n" + \
+ "\tignoring -j or num_jobs option.\n"
+ elif sys.platform == 'win32':
+ msg = fetch_win32_parallel_msg()
+ if msg:
+ SCons.Warnings.warn(SCons.Warnings.NoParallelSupportWarning, msg)
+
+ memory_stats.append('before building targets:')
+ count_stats.append(('pre-', 'build'))
+
+ def jobs_postfunc(
+ jobs=jobs,
+ options=options,
+ closing_message=closing_message,
+ failure_message=failure_message
+ ):
+ if jobs.were_interrupted():
+ progress_display("scons: Build interrupted.")
+ global exit_status
+ global this_build_status
+ exit_status = 2
+ this_build_status = 2
+
+ if this_build_status:
+ progress_display("scons: " + failure_message)
+ else:
+ progress_display("scons: " + closing_message)
+ if not options.no_exec:
+ if jobs.were_interrupted():
+ progress_display("scons: writing .sconsign file.")
+ SCons.SConsign.write()
+
+ progress_display("scons: " + opening_message)
+ jobs.run(postfunc = jobs_postfunc)
+
+ memory_stats.append('after building targets:')
+ count_stats.append(('post-', 'build'))
+
+ return nodes
+
+def _exec_main(parser, values):
+ sconsflags = os.environ.get('SCONSFLAGS', '')
+ all_args = string.split(sconsflags) + sys.argv[1:]
+
+ options, args = parser.parse_args(all_args, values)
+
+ if type(options.debug) == type([]) and "pdb" in options.debug:
+ import pdb
+ pdb.Pdb().runcall(_main, parser)
+ elif options.profile_file:
+ try:
+ from cProfile import Profile
+ except ImportError, e:
+ from profile import Profile
+
+ # Some versions of Python 2.4 shipped a profiler that had the
+ # wrong 'c_exception' entry in its dispatch table. Make sure
+ # we have the right one. (This may put an unnecessary entry
+ # in the table in earlier versions of Python, but its presence
+ # shouldn't hurt anything).
+ try:
+ dispatch = Profile.dispatch
+ except AttributeError:
+ pass
+ else:
+ dispatch['c_exception'] = Profile.trace_dispatch_return
+
+ prof = Profile()
+ try:
+ prof.runcall(_main, parser)
+ except SConsPrintHelpException, e:
+ prof.dump_stats(options.profile_file)
+ raise e
+ except SystemExit:
+ pass
+ prof.dump_stats(options.profile_file)
+ else:
+ _main(parser)
+
+def main():
+ global OptionsParser
+ global exit_status
+ global first_command_start
+
+ # Check up front for a Python version we do not support. We
+ # delay the check for deprecated Python versions until later,
+ # after the SConscript files have been read, in case they
+ # disable that warning.
+ if python_version_unsupported():
+ msg = "scons: *** SCons version %s does not run under Python version %s.\n"
+ sys.stderr.write(msg % (SCons.__version__, python_version_string()))
+ sys.exit(1)
+
+ parts = ["SCons by Steven Knight et al.:\n"]
+ try:
+ import __main__
+ parts.append(version_string("script", __main__))
+ except (ImportError, AttributeError):
+ # On Windows there is no scons.py, so there is no
+ # __main__.__version__, hence there is no script version.
+ pass
+ parts.append(version_string("engine", SCons))
+ parts.append("Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation")
+ version = string.join(parts, '')
+
+ import SConsOptions
+ parser = SConsOptions.Parser(version)
+ values = SConsOptions.SConsValues(parser.get_default_values())
+
+ OptionsParser = parser
+
+ try:
+ _exec_main(parser, values)
+ except SystemExit, s:
+ if s:
+ exit_status = s
+ except KeyboardInterrupt:
+ print("scons: Build interrupted.")
+ sys.exit(2)
+ except SyntaxError, e:
+ _scons_syntax_error(e)
+ except SCons.Errors.InternalError:
+ _scons_internal_error()
+ except SCons.Errors.UserError, e:
+ _scons_user_error(e)
+ except SConsPrintHelpException:
+ parser.print_help()
+ exit_status = 0
+ except SCons.Errors.BuildError, e:
+ exit_status = e.exitstatus
+ except:
+        # An exception here is likely a builtin Python exception raised by
+        # Python code in an SConscript file.  Show them precisely what the
+ # problem was and where it happened.
+ SCons.Script._SConscript.SConscript_exception()
+ sys.exit(2)
+
+ memory_stats.print_stats()
+ count_stats.print_stats()
+
+ if print_objects:
+ SCons.Debug.listLoggedInstances('*')
+ #SCons.Debug.dumpLoggedInstances('*')
+
+ if print_memoizer:
+ SCons.Memoize.Dump("Memoizer (memory cache) hits and misses:")
+
+ # Dump any development debug info that may have been enabled.
+ # These are purely for internal debugging during development, so
+ # there's no need to control them with --debug= options; they're
+ # controlled by changing the source code.
+ SCons.Debug.dump_caller_counts()
+ SCons.Taskmaster.dump_stats()
+
+ if print_time:
+ total_time = time.time() - SCons.Script.start_time
+ if num_jobs == 1:
+ ct = cumulative_command_time
+ else:
+ if last_command_end is None or first_command_start is None:
+ ct = 0.0
+ else:
+ ct = last_command_end - first_command_start
+ scons_time = total_time - sconscript_time - ct
+ print "Total build time: %f seconds"%total_time
+ print "Total SConscript file execution time: %f seconds"%sconscript_time
+ print "Total SCons execution time: %f seconds"%scons_time
+ print "Total command execution time: %f seconds"%ct
+
+ sys.exit(exit_status)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Script/SConsOptions.py b/tools/scons/scons-local-1.2.0/SCons/Script/SConsOptions.py
new file mode 100644
index 000000000..636fd2024
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Script/SConsOptions.py
@@ -0,0 +1,940 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Script/SConsOptions.py 3842 2008/12/20 22:59:52 scons"
+
+import optparse
+import re
+import string
+import sys
+import textwrap
+
+try:
+ no_hyphen_re = re.compile(r'(\s+|(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))')
+except re.error:
+ # Pre-2.0 Python versions don't have the (?<= negative
+ # look-behind assertion.
+ no_hyphen_re = re.compile(r'(\s+|-*\w{2,}-(?=\w{2,}))')
+
+try:
+ from gettext import gettext
+except ImportError:
+ def gettext(message):
+ return message
+_ = gettext
+
+import SCons.Node.FS
+import SCons.Warnings
+
+OptionValueError = optparse.OptionValueError
+SUPPRESS_HELP = optparse.SUPPRESS_HELP
+
+diskcheck_all = SCons.Node.FS.diskcheck_types()
+
+def diskcheck_convert(value):
+ if value is None:
+ return []
+ if not SCons.Util.is_List(value):
+ value = string.split(value, ',')
+ result = []
+ for v in map(string.lower, value):
+ if v == 'all':
+ result = diskcheck_all
+ elif v == 'none':
+ result = []
+ elif v in diskcheck_all:
+ result.append(v)
+ else:
+ raise ValueError, v
+ return result
+
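+# Illustrative behaviour (a sketch; 'match' and 'rcs' are example names,
+# the real set comes from SCons.Node.FS.diskcheck_types()):
+#
+#     diskcheck_convert('match,rcs')    ->  ['match', 'rcs']
+#     diskcheck_convert('none,match')   ->  ['match']
+#     diskcheck_convert('bogus')        ->  raises ValueError
+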
+class SConsValues(optparse.Values):
+ """
+ Holder class for uniform access to SCons options, regardless
+ of whether or not they can be set on the command line or in the
+ SConscript files (using the SetOption() function).
+
+ A SCons option value can originate three different ways:
+
+ 1) set on the command line;
+ 2) set in an SConscript file;
+       3) the default setting (from the op.add_option()
+ calls in the Parser() function, below).
+
+ The command line always overrides a value set in a SConscript file,
+ which in turn always overrides default settings. Because we want
+ to support user-specified options in the SConscript file itself,
+ though, we may not know about all of the options when the command
+ line is first parsed, so we can't make all the necessary precedence
+ decisions at the time the option is configured.
+
+ The solution implemented in this class is to keep these different sets
+ of settings separate (command line, SConscript file, and default)
+ and to override the __getattr__() method to check them in turn.
+ This should allow the rest of the code to just fetch values as
+ attributes of an instance of this class, without having to worry
+ about where they came from.
+
+ Note that not all command line options are settable from SConscript
+ files, and the ones that are must be explicitly added to the
+ "settable" list in this class, and optionally validated and coerced
+ in the set_option() method.
+ """
+
+ def __init__(self, defaults):
+ self.__dict__['__defaults__'] = defaults
+ self.__dict__['__SConscript_settings__'] = {}
+
+ def __getattr__(self, attr):
+ """
+ Fetches an options value, checking first for explicit settings
+ from the command line (which are direct attributes), then the
+ SConscript file settings, then the default values.
+ """
+ try:
+ return self.__dict__[attr]
+ except KeyError:
+ try:
+ return self.__dict__['__SConscript_settings__'][attr]
+ except KeyError:
+ return getattr(self.__dict__['__defaults__'], attr)
+
+ settable = [
+ 'clean',
+ 'diskcheck',
+ 'duplicate',
+ 'help',
+ 'implicit_cache',
+ 'max_drift',
+ 'md5_chunksize',
+ 'no_exec',
+ 'num_jobs',
+ 'random',
+ 'stack_size',
+ 'warn',
+ ]
+
+ def set_option(self, name, value):
+ """
+ Sets an option from an SConscript file.
+ """
+ if not name in self.settable:
+ raise SCons.Errors.UserError, "This option is not settable from a SConscript file: %s"%name
+
+ if name == 'num_jobs':
+ try:
+ value = int(value)
+ if value < 1:
+ raise ValueError
+ except ValueError:
+ raise SCons.Errors.UserError, "A positive integer is required: %s"%repr(value)
+ elif name == 'max_drift':
+ try:
+ value = int(value)
+ except ValueError:
+ raise SCons.Errors.UserError, "An integer is required: %s"%repr(value)
+ elif name == 'duplicate':
+ try:
+ value = str(value)
+ except ValueError:
+ raise SCons.Errors.UserError, "A string is required: %s"%repr(value)
+ if not value in SCons.Node.FS.Valid_Duplicates:
+ raise SCons.Errors.UserError, "Not a valid duplication style: %s" % value
+ # Set the duplicate style right away so it can affect linking
+ # of SConscript files.
+ SCons.Node.FS.set_duplicate(value)
+ elif name == 'diskcheck':
+ try:
+ value = diskcheck_convert(value)
+ except ValueError, v:
+ raise SCons.Errors.UserError, "Not a valid diskcheck value: %s"%v
+ if not self.__dict__.has_key('diskcheck'):
+ # No --diskcheck= option was specified on the command line.
+ # Set this right away so it can affect the rest of the
+ # file/Node lookups while processing the SConscript files.
+ SCons.Node.FS.set_diskcheck(value)
+ elif name == 'stack_size':
+ try:
+ value = int(value)
+ except ValueError:
+ raise SCons.Errors.UserError, "An integer is required: %s"%repr(value)
+ elif name == 'md5_chunksize':
+ try:
+ value = int(value)
+ except ValueError:
+ raise SCons.Errors.UserError, "An integer is required: %s"%repr(value)
+ elif name == 'warn':
+ if SCons.Util.is_String(value):
+ value = [value]
+ value = self.__SConscript_settings__.get(name, []) + value
+ SCons.Warnings.process_warn_strings(value)
+
+ self.__SConscript_settings__[name] = value
+
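+# Illustrative precedence (a sketch, not executable as-is): a value parsed
+# from the command line is a direct attribute and wins; SetOption() values
+# live in __SConscript_settings__ and win over the optparse defaults, e.g.:
+#
+#     # scons -j 8               ->  values.num_jobs == 8, SetOption ignored
+#     # SetOption('num_jobs', 4) ->  values.num_jobs == 4 when -j not given
+#     # neither                  ->  the default declared in Parser() below
+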
+class SConsOption(optparse.Option):
+ def convert_value(self, opt, value):
+ if value is not None:
+ if self.nargs in (1, '?'):
+ return self.check_value(opt, value)
+ else:
+ return tuple(map(lambda v, o=opt, s=self: s.check_value(o, v), value))
+
+ def process(self, opt, value, values, parser):
+
+ # First, convert the value(s) to the right type. Howl if any
+ # value(s) are bogus.
+ value = self.convert_value(opt, value)
+
+ # And then take whatever action is expected of us.
+ # This is a separate method to make life easier for
+ # subclasses to add new actions.
+ return self.take_action(
+ self.action, self.dest, opt, value, values, parser)
+
+ def _check_nargs_optional(self):
+ if self.nargs == '?' and self._short_opts:
+ fmt = "option %s: nargs='?' is incompatible with short options"
+ raise SCons.Errors.UserError, fmt % self._short_opts[0]
+
+ try:
+ _orig_CONST_ACTIONS = optparse.Option.CONST_ACTIONS
+
+ _orig_CHECK_METHODS = optparse.Option.CHECK_METHODS
+
+ except AttributeError:
+ # optparse.Option had no CONST_ACTIONS before Python 2.5.
+
+ _orig_CONST_ACTIONS = ("store_const",)
+
+ def _check_const(self):
+ if self.action not in self.CONST_ACTIONS and self.const is not None:
+ raise OptionError(
+ "'const' must not be supplied for action %r" % self.action,
+ self)
+
+ # optparse.Option collects its list of unbound check functions
+ # up front. This sucks because it means we can't just override
+ # the _check_const() function like a normal method, we have to
+ # actually replace it in the list. This seems to be the most
+ # straightforward way to do that.
+
+ _orig_CHECK_METHODS = [optparse.Option._check_action,
+ optparse.Option._check_type,
+ optparse.Option._check_choice,
+ optparse.Option._check_dest,
+ _check_const,
+ optparse.Option._check_nargs,
+ optparse.Option._check_callback]
+
+ CHECK_METHODS = _orig_CHECK_METHODS + [_check_nargs_optional]
+
+ CONST_ACTIONS = _orig_CONST_ACTIONS + optparse.Option.TYPED_ACTIONS
+
+class SConsOptionGroup(optparse.OptionGroup):
+ """
+ A subclass for SCons-specific option groups.
+
+ The only difference between this and the base class is that we print
+    the group's help text flush left, underneath its own title but
+ lined up with the normal "SCons Options".
+ """
+ def format_help(self, formatter):
+ """
+ Format an option group's help text, outdenting the title so it's
+ flush with the "SCons Options" title we print at the top.
+ """
+ formatter.dedent()
+ result = formatter.format_heading(self.title)
+ formatter.indent()
+ result = result + optparse.OptionContainer.format_help(self, formatter)
+ return result
+
+class SConsOptionParser(optparse.OptionParser):
+ preserve_unknown_options = False
+
+ def error(self, msg):
+ self.print_usage(sys.stderr)
+ sys.stderr.write("SCons error: %s\n" % msg)
+ sys.exit(2)
+
+ def _process_long_opt(self, rargs, values):
+ """
+ SCons-specific processing of long options.
+
+ This is copied directly from the normal
+ optparse._process_long_opt() method, except that, if configured
+ to do so, we catch the exception thrown when an unknown option
+ is encountered and just stick it back on the "leftover" arguments
+ for later (re-)processing.
+ """
+ arg = rargs.pop(0)
+
+ # Value explicitly attached to arg? Pretend it's the next
+ # argument.
+ if "=" in arg:
+ (opt, next_arg) = string.split(arg, "=", 1)
+ rargs.insert(0, next_arg)
+ had_explicit_value = True
+ else:
+ opt = arg
+ had_explicit_value = False
+
+ try:
+ opt = self._match_long_opt(opt)
+ except optparse.BadOptionError:
+ if self.preserve_unknown_options:
+ # SCons-specific: if requested, add unknown options to
+ # the "leftover arguments" list for later processing.
+ self.largs.append(arg)
+ if had_explicit_value:
+ # The unknown option will be re-processed later,
+ # so undo the insertion of the explicit value.
+ rargs.pop(0)
+ return
+ raise
+
+ option = self._long_opt[opt]
+ if option.takes_value():
+ nargs = option.nargs
+ if nargs == '?':
+ if had_explicit_value:
+ value = rargs.pop(0)
+ else:
+ value = option.const
+ elif len(rargs) < nargs:
+ if nargs == 1:
+ self.error(_("%s option requires an argument") % opt)
+ else:
+ self.error(_("%s option requires %d arguments")
+ % (opt, nargs))
+ elif nargs == 1:
+ value = rargs.pop(0)
+ else:
+ value = tuple(rargs[0:nargs])
+ del rargs[0:nargs]
+
+ elif had_explicit_value:
+ self.error(_("%s option does not take a value") % opt)
+
+ else:
+ value = None
+
+ option.process(opt, value, values, self)
+
+ def add_local_option(self, *args, **kw):
+ """
+ Adds a local option to the parser.
+
+ This is initiated by a SetOption() call to add a user-defined
+ command-line option. We add the option to a separate option
+ group for the local options, creating the group if necessary.
+ """
+ try:
+ group = self.local_option_group
+ except AttributeError:
+ group = SConsOptionGroup(self, 'Local Options')
+ group = self.add_option_group(group)
+ self.local_option_group = group
+
+ result = apply(group.add_option, args, kw)
+
+ if result:
+            # The option was added successfully.  We now have to add the
+ # default value to our object that holds the default values
+ # (so that an attempt to fetch the option's attribute will
+ # yield the default value when not overridden) and then
+ # we re-parse the leftover command-line options, so that
+ # any value overridden on the command line is immediately
+ # available if the user turns around and does a GetOption()
+ # right away.
+ setattr(self.values.__defaults__, result.dest, result.default)
+ self.parse_args(self.largs, self.values)
+
+ return result
+
+class SConsIndentedHelpFormatter(optparse.IndentedHelpFormatter):
+ def format_usage(self, usage):
+ return "usage: %s\n" % usage
+
+ def format_heading(self, heading):
+ """
+ This translates any heading of "options" or "Options" into
+ "SCons Options." Unfortunately, we have to do this here,
+ because those titles are hard-coded in the optparse calls.
+ """
+ if heading == 'options':
+ # The versions of optparse.py shipped with Pythons 2.3 and
+ # 2.4 pass this in uncapitalized; override that so we get
+ # consistent output on all versions.
+ heading = "Options"
+ if heading == 'Options':
+ heading = "SCons Options"
+ return optparse.IndentedHelpFormatter.format_heading(self, heading)
+
+ def format_option(self, option):
+ """
+ A copy of the normal optparse.IndentedHelpFormatter.format_option()
+ method. This has been snarfed so we can modify text wrapping to
+        our liking:
+
+ -- add our own regular expression that doesn't break on hyphens
+ (so things like --no-print-directory don't get broken);
+
+ -- wrap the list of options themselves when it's too long
+ (the wrapper.fill(opts) call below);
+
+ -- set the subsequent_indent when wrapping the help_text.
+ """
+ # The help for each option consists of two parts:
+ # * the opt strings and metavars
+ # eg. ("-x", or "-fFILENAME, --file=FILENAME")
+ # * the user-supplied help string
+ # eg. ("turn on expert mode", "read data from FILENAME")
+ #
+ # If possible, we write both of these on the same line:
+ # -x turn on expert mode
+ #
+ # But if the opt string list is too long, we put the help
+ # string on a second line, indented to the same column it would
+ # start in if it fit on the first line.
+ # -fFILENAME, --file=FILENAME
+ # read data from FILENAME
+ result = []
+
+ try:
+ opts = self.option_strings[option]
+ except AttributeError:
+ # The Python 2.3 version of optparse attaches this to
+            # the option argument, not to this object.
+ opts = option.option_strings
+
+ opt_width = self.help_position - self.current_indent - 2
+ if len(opts) > opt_width:
+ wrapper = textwrap.TextWrapper(width=self.width,
+ initial_indent = ' ',
+ subsequent_indent = ' ')
+ wrapper.wordsep_re = no_hyphen_re
+ opts = wrapper.fill(opts) + '\n'
+ indent_first = self.help_position
+ else: # start help on same line as opts
+ opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts)
+ indent_first = 0
+ result.append(opts)
+ if option.help:
+
+ try:
+ expand_default = self.expand_default
+ except AttributeError:
+ # The HelpFormatter base class in the Python 2.3 version
+ # of optparse has no expand_default() method.
+ help_text = option.help
+ else:
+ help_text = expand_default(option)
+
+ # SCons: indent every line of the help text but the first.
+ wrapper = textwrap.TextWrapper(width=self.help_width,
+ subsequent_indent = ' ')
+ wrapper.wordsep_re = no_hyphen_re
+ help_lines = wrapper.wrap(help_text)
+ result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
+ for line in help_lines[1:]:
+ result.append("%*s%s\n" % (self.help_position, "", line))
+ elif opts[-1] != "\n":
+ result.append("\n")
+ return string.join(result, "")
+
+ # For consistent help output across Python versions, we provide a
+ # subclass copy of format_option_strings() and these two variables.
+    # This is necessary (?) for Python 2.3, which otherwise concatenates
+ # a short option with its metavar.
+ _short_opt_fmt = "%s %s"
+ _long_opt_fmt = "%s=%s"
+
+ def format_option_strings(self, option):
+ """Return a comma-separated list of option strings & metavariables."""
+ if option.takes_value():
+ metavar = option.metavar or string.upper(option.dest)
+ short_opts = []
+ for sopt in option._short_opts:
+ short_opts.append(self._short_opt_fmt % (sopt, metavar))
+ long_opts = []
+ for lopt in option._long_opts:
+ long_opts.append(self._long_opt_fmt % (lopt, metavar))
+ else:
+ short_opts = option._short_opts
+ long_opts = option._long_opts
+
+ if self.short_first:
+ opts = short_opts + long_opts
+ else:
+ opts = long_opts + short_opts
+
+ return string.join(opts, ", ")
+
+def Parser(version):
+ """
+ Returns an options parser object initialized with the standard
+ SCons options.
+ """
+
+ formatter = SConsIndentedHelpFormatter(max_help_position=30)
+
+ op = SConsOptionParser(option_class=SConsOption,
+ add_help_option=False,
+ formatter=formatter,
+ usage="usage: scons [OPTION] [TARGET] ...",)
+
+ op.preserve_unknown_options = True
+ op.version = version
+
+ # Add the options to the parser we just created.
+ #
+ # These are in the order we want them to show up in the -H help
+ # text, basically alphabetical. Each op.add_option() call below
+ # should have a consistent format:
+ #
+ # op.add_option("-L", "--long-option-name",
+ # nargs=1, type="string",
+ # dest="long_option_name", default='foo',
+ # action="callback", callback=opt_long_option,
+ # help="help text goes here",
+ # metavar="VAR")
+ #
+ # Even though the optparse module constructs reasonable default
+ # destination names from the long option names, we're going to be
+ # explicit about each one for easier readability and so this code
+ # will at least show up when grepping the source for option attribute
+ # names, or otherwise browsing the source code.
+
+ # options ignored for compatibility
+ def opt_ignore(option, opt, value, parser):
+ sys.stderr.write("Warning: ignoring %s option\n" % opt)
+ op.add_option("-b", "-d", "-e", "-m", "-S", "-t", "-w",
+ "--environment-overrides",
+ "--no-keep-going",
+ "--no-print-directory",
+ "--print-directory",
+ "--stop",
+ "--touch",
+ action="callback", callback=opt_ignore,
+ help="Ignored for compatibility.")
+
+ op.add_option('-c', '--clean', '--remove',
+ dest="clean", default=False,
+ action="store_true",
+ help="Remove specified targets and dependencies.")
+
+ op.add_option('-C', '--directory',
+ nargs=1, type="string",
+ dest="directory", default=[],
+ action="append",
+ help="Change to DIR before doing anything.",
+ metavar="DIR")
+
+ op.add_option('--cache-debug',
+ nargs=1,
+ dest="cache_debug", default=None,
+ action="store",
+ help="Print CacheDir debug info to FILE.",
+ metavar="FILE")
+
+ op.add_option('--cache-disable', '--no-cache',
+ dest='cache_disable', default=False,
+ action="store_true",
+ help="Do not retrieve built targets from CacheDir.")
+
+ op.add_option('--cache-force', '--cache-populate',
+ dest='cache_force', default=False,
+ action="store_true",
+ help="Copy already-built targets into the CacheDir.")
+
+ op.add_option('--cache-show',
+ dest='cache_show', default=False,
+ action="store_true",
+ help="Print build actions for files from CacheDir.")
+
+ config_options = ["auto", "force" ,"cache"]
+
+ def opt_config(option, opt, value, parser, c_options=config_options):
+ if not value in c_options:
+ raise OptionValueError("Warning: %s is not a valid config type" % value)
+ setattr(parser.values, option.dest, value)
+ opt_config_help = "Controls Configure subsystem: %s." \
+ % string.join(config_options, ", ")
+ op.add_option('--config',
+ nargs=1, type="string",
+ dest="config", default="auto",
+ action="callback", callback=opt_config,
+ help = opt_config_help,
+ metavar="MODE")
+
+ op.add_option('-D',
+ dest="climb_up", default=None,
+ action="store_const", const=2,
+ help="Search up directory tree for SConstruct, "
+ "build all Default() targets.")
+
+ deprecated_debug_options = {
+ "dtree" : '; please use --tree=derived instead',
+ "nomemoizer" : ' and has no effect',
+ "stree" : '; please use --tree=all,status instead',
+ "tree" : '; please use --tree=all instead',
+ }
+
+ debug_options = ["count", "explain", "findlibs",
+ "includes", "memoizer", "memory", "objects",
+ "pdb", "presub", "stacktrace",
+ "time"] + deprecated_debug_options.keys()
+
+ def opt_debug(option, opt, value, parser,
+ debug_options=debug_options,
+ deprecated_debug_options=deprecated_debug_options):
+ if value in debug_options:
+ parser.values.debug.append(value)
+ if value in deprecated_debug_options.keys():
+ try:
+ parser.values.delayed_warnings
+ except AttributeError:
+ parser.values.delayed_warnings = []
+ msg = deprecated_debug_options[value]
+ w = "The --debug=%s option is deprecated%s." % (value, msg)
+ t = (SCons.Warnings.DeprecatedWarning, w)
+ parser.values.delayed_warnings.append(t)
+ else:
+ raise OptionValueError("Warning: %s is not a valid debug type" % value)
+ opt_debug_help = "Print various types of debugging information: %s." \
+ % string.join(debug_options, ", ")
+ op.add_option('--debug',
+ nargs=1, type="string",
+ dest="debug", default=[],
+ action="callback", callback=opt_debug,
+ help=opt_debug_help,
+ metavar="TYPE")
+
+ def opt_diskcheck(option, opt, value, parser):
+ try:
+ diskcheck_value = diskcheck_convert(value)
+ except ValueError, e:
+ raise OptionValueError("Warning: `%s' is not a valid diskcheck type" % e)
+ setattr(parser.values, option.dest, diskcheck_value)
+
+ op.add_option('--diskcheck',
+ nargs=1, type="string",
+ dest='diskcheck', default=None,
+ action="callback", callback=opt_diskcheck,
+ help="Enable specific on-disk checks.",
+ metavar="TYPE")
+
+ def opt_duplicate(option, opt, value, parser):
+ if not value in SCons.Node.FS.Valid_Duplicates:
+ raise OptionValueError("`%s' is not a valid duplication style." % value)
+ setattr(parser.values, option.dest, value)
+ # Set the duplicate style right away so it can affect linking
+ # of SConscript files.
+ SCons.Node.FS.set_duplicate(value)
+
+    opt_duplicate_help = "Set the preferred duplication method. Must be one of " \
+ + string.join(SCons.Node.FS.Valid_Duplicates, ", ")
+
+ op.add_option('--duplicate',
+ nargs=1, type="string",
+ dest="duplicate", default='hard-soft-copy',
+ action="callback", callback=opt_duplicate,
+ help=opt_duplicate_help)
+
+ op.add_option('-f', '--file', '--makefile', '--sconstruct',
+ nargs=1, type="string",
+ dest="file", default=[],
+ action="append",
+ help="Read FILE as the top-level SConstruct file.")
+
+ op.add_option('-h', '--help',
+ dest="help", default=False,
+ action="store_true",
+ help="Print defined help message, or this one.")
+
+ op.add_option("-H", "--help-options",
+ action="help",
+ help="Print this message and exit.")
+
+ op.add_option('-i', '--ignore-errors',
+ dest='ignore_errors', default=False,
+ action="store_true",
+ help="Ignore errors from build actions.")
+
+ op.add_option('-I', '--include-dir',
+ nargs=1,
+ dest='include_dir', default=[],
+ action="append",
+ help="Search DIR for imported Python modules.",
+ metavar="DIR")
+
+ op.add_option('--implicit-cache',
+ dest='implicit_cache', default=False,
+ action="store_true",
+                  help="Cache implicit dependencies.")
+
+ def opt_implicit_deps(option, opt, value, parser):
+ setattr(parser.values, 'implicit_cache', True)
+ setattr(parser.values, option.dest, True)
+
+ op.add_option('--implicit-deps-changed',
+ dest="implicit_deps_changed", default=False,
+ action="callback", callback=opt_implicit_deps,
+ help="Ignore cached implicit dependencies.")
+
+ op.add_option('--implicit-deps-unchanged',
+ dest="implicit_deps_unchanged", default=False,
+ action="callback", callback=opt_implicit_deps,
+ help="Ignore changes in implicit dependencies.")
+
+ op.add_option('--interact', '--interactive',
+ dest='interactive', default=False,
+ action="store_true",
+ help="Run in interactive mode.")
+
+ op.add_option('-j', '--jobs',
+ nargs=1, type="int",
+ dest="num_jobs", default=1,
+ action="store",
+ help="Allow N jobs at once.",
+ metavar="N")
+
+ op.add_option('-k', '--keep-going',
+ dest='keep_going', default=False,
+ action="store_true",
+ help="Keep going when a target can't be made.")
+
+ op.add_option('--max-drift',
+ nargs=1, type="int",
+ dest='max_drift', default=SCons.Node.FS.default_max_drift,
+ action="store",
+ help="Set maximum system clock drift to N seconds.",
+ metavar="N")
+
+ op.add_option('--md5-chunksize',
+ nargs=1, type="int",
+ dest='md5_chunksize', default=SCons.Node.FS.File.md5_chunksize,
+ action="store",
+ help="Set chunk-size for MD5 signature computation to N kilobytes.",
+ metavar="N")
+
+ op.add_option('-n', '--no-exec', '--just-print', '--dry-run', '--recon',
+ dest='no_exec', default=False,
+ action="store_true",
+ help="Don't build; just print commands.")
+
+ op.add_option('--no-site-dir',
+ dest='no_site_dir', default=False,
+ action="store_true",
+ help="Don't search or use the usual site_scons dir.")
+
+ op.add_option('--profile',
+ nargs=1,
+ dest="profile_file", default=None,
+ action="store",
+ help="Profile SCons and put results in FILE.",
+ metavar="FILE")
+
+ op.add_option('-q', '--question',
+ dest="question", default=False,
+ action="store_true",
+ help="Don't build; exit status says if up to date.")
+
+ op.add_option('-Q',
+ dest='no_progress', default=False,
+ action="store_true",
+ help="Suppress \"Reading/Building\" progress messages.")
+
+ op.add_option('--random',
+ dest="random", default=False,
+ action="store_true",
+ help="Build dependencies in random order.")
+
+ op.add_option('-s', '--silent', '--quiet',
+ dest="silent", default=False,
+ action="store_true",
+ help="Don't print commands.")
+
+ op.add_option('--site-dir',
+ nargs=1,
+ dest='site_dir', default=None,
+ action="store",
+ help="Use DIR instead of the usual site_scons dir.",
+ metavar="DIR")
+
+ op.add_option('--stack-size',
+ nargs=1, type="int",
+ dest='stack_size',
+ action="store",
+ help="Set the stack size of the threads used to run jobs to N kilobytes.",
+ metavar="N")
+
+ op.add_option('--taskmastertrace',
+ nargs=1,
+ dest="taskmastertrace_file", default=None,
+ action="store",
+ help="Trace Node evaluation to FILE.",
+ metavar="FILE")
+
+ tree_options = ["all", "derived", "prune", "status"]
+
+ def opt_tree(option, opt, value, parser, tree_options=tree_options):
+ import Main
+ tp = Main.TreePrinter()
+ for o in string.split(value, ','):
+ if o == 'all':
+ tp.derived = False
+ elif o == 'derived':
+ tp.derived = True
+ elif o == 'prune':
+ tp.prune = True
+ elif o == 'status':
+ tp.status = True
+ else:
+ raise OptionValueError("Warning: %s is not a valid --tree option" % o)
+ parser.values.tree_printers.append(tp)
+
+ opt_tree_help = "Print a dependency tree in various formats: %s." \
+ % string.join(tree_options, ", ")
+
+ op.add_option('--tree',
+ nargs=1, type="string",
+ dest="tree_printers", default=[],
+ action="callback", callback=opt_tree,
+ help=opt_tree_help,
+ metavar="OPTIONS")
+
+ op.add_option('-u', '--up', '--search-up',
+ dest="climb_up", default=0,
+ action="store_const", const=1,
+ help="Search up directory tree for SConstruct, "
+ "build targets at or below current directory.")
+
+ op.add_option('-U',
+ dest="climb_up", default=0,
+ action="store_const", const=3,
+ help="Search up directory tree for SConstruct, "
+ "build Default() targets from local SConscript.")
+
+ def opt_version(option, opt, value, parser):
+ sys.stdout.write(parser.version + '\n')
+ sys.exit(0)
+ op.add_option("-v", "--version",
+ action="callback", callback=opt_version,
+ help="Print the SCons version number and exit.")
+
+    def opt_warn(option, opt, value, parser):
+ if SCons.Util.is_String(value):
+ value = string.split(value, ',')
+ parser.values.warn.extend(value)
+
+ op.add_option('--warn', '--warning',
+ nargs=1, type="string",
+ dest="warn", default=[],
+ action="callback", callback=opt_warn,
+ help="Enable or disable warnings.",
+ metavar="WARNING-SPEC")
+
+ op.add_option('-Y', '--repository', '--srcdir',
+ nargs=1,
+ dest="repository", default=[],
+ action="append",
+ help="Search REPOSITORY for source and target files.")
+
+ # Options from Make and Cons classic that we do not yet support,
+ # but which we may support someday and whose (potential) meanings
+ # we don't want to change. These all get a "the -X option is not
+ # yet implemented" message and don't show up in the help output.
+
+ def opt_not_yet(option, opt, value, parser):
+ msg = "Warning: the %s option is not yet implemented\n" % opt
+ sys.stderr.write(msg)
+ sys.exit(0)
+
+
+ op.add_option('-l', '--load-average', '--max-load',
+ nargs=1, type="int",
+ dest="load_average", default=0,
+ action="callback", callback=opt_not_yet,
+ # action="store",
+ # help="Don't start multiple jobs unless load is below "
+ # "LOAD-AVERAGE."
+ help=SUPPRESS_HELP)
+ op.add_option('--list-actions',
+ dest="list_actions",
+ action="callback", callback=opt_not_yet,
+ # help="Don't build; list files and build actions."
+ help=SUPPRESS_HELP)
+ op.add_option('--list-derived',
+ dest="list_derived",
+ action="callback", callback=opt_not_yet,
+ # help="Don't build; list files that would be built."
+ help=SUPPRESS_HELP)
+ op.add_option('--list-where',
+ dest="list_where",
+ action="callback", callback=opt_not_yet,
+ # help="Don't build; list files and where defined."
+ help=SUPPRESS_HELP)
+ op.add_option('-o', '--old-file', '--assume-old',
+ nargs=1, type="string",
+ dest="old_file", default=[],
+ action="callback", callback=opt_not_yet,
+ # action="append",
+ # help = "Consider FILE to be old; don't rebuild it."
+ help=SUPPRESS_HELP)
+ op.add_option('--override',
+ nargs=1, type="string",
+ action="callback", callback=opt_not_yet,
+ dest="override",
+ # help="Override variables as specified in FILE."
+ help=SUPPRESS_HELP)
+ op.add_option('-p',
+ action="callback", callback=opt_not_yet,
+ dest="p",
+ # help="Print internal environments/objects."
+ help=SUPPRESS_HELP)
+ op.add_option('-r', '-R', '--no-builtin-rules', '--no-builtin-variables',
+ action="callback", callback=opt_not_yet,
+ dest="no_builtin_rules",
+ # help="Clear default environments and variables."
+ help=SUPPRESS_HELP)
+ op.add_option('--write-filenames',
+ nargs=1, type="string",
+ dest="write_filenames",
+ action="callback", callback=opt_not_yet,
+ # help="Write all filenames examined into FILE."
+ help=SUPPRESS_HELP)
+ op.add_option('-W', '--new-file', '--assume-new', '--what-if',
+ nargs=1, type="string",
+ dest="new_file",
+ action="callback", callback=opt_not_yet,
+ # help="Consider FILE to be changed."
+ help=SUPPRESS_HELP)
+ op.add_option('--warn-undefined-variables',
+ dest="warn_undefined_variables",
+ action="callback", callback=opt_not_yet,
+ # help="Warn when an undefined variable is referenced."
+ help=SUPPRESS_HELP)
+
+ return op
diff --git a/tools/scons/scons-local-1.2.0/SCons/Script/SConscript.py b/tools/scons/scons-local-1.2.0/SCons/Script/SConscript.py
new file mode 100644
index 000000000..c52c9798a
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Script/SConscript.py
@@ -0,0 +1,632 @@
+"""SCons.Script.SConscript
+
+This module defines the Python API provided to SConscript and SConstruct
+files.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Script/SConscript.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons
+import SCons.Action
+import SCons.Builder
+import SCons.Defaults
+import SCons.Environment
+import SCons.Errors
+import SCons.Node
+import SCons.Node.Alias
+import SCons.Node.FS
+import SCons.Platform
+import SCons.SConf
+import SCons.Script.Main
+import SCons.Tool
+import SCons.Util
+
+import os
+import os.path
+import re
+import string
+import sys
+import traceback
+import types
+import UserList
+
+# The following variables used to live in this module. Some
+# SConscript files out there may have referred to them directly as
+# SCons.Script.SConscript.*. This is now supported by some special
+# handling towards the bottom of the SConscript.__init__.py module.
+#Arguments = {}
+#ArgList = []
+#BuildTargets = TargetList()
+#CommandLineTargets = []
+#DefaultTargets = []
+
+class SConscriptReturn(Exception):
+ pass
+
+launch_dir = os.path.abspath(os.curdir)
+
+GlobalDict = None
+
+# global exports set by Export():
+global_exports = {}
+
+# chdir flag
+sconscript_chdir = 1
+
+def get_calling_namespaces():
+ """Return the locals and globals for the function that called
+ into this module in the current call stack."""
+ try: 1/0
+ except ZeroDivisionError:
+ # Don't start iterating with the current stack-frame to
+ # prevent creating reference cycles (f_back is safe).
+ frame = sys.exc_info()[2].tb_frame.f_back
+
+ # Find the first frame that *isn't* from this file. This means
+ # that we expect all of the SCons frames that implement an Export()
+ # or SConscript() call to be in this file, so that we can identify
+ # the first non-Script.SConscript frame as the user's local calling
+ # environment, and the locals and globals dictionaries from that
+ # frame as the calling namespaces. See the comment below preceding
+ # the DefaultEnvironmentCall block for even more explanation.
+ while frame.f_globals.get("__name__") == __name__:
+ frame = frame.f_back
+
+ return frame.f_locals, frame.f_globals
+
+
+def compute_exports(exports):
+ """Compute a dictionary of exports given one of the parameters
+ to the Export() function or the exports argument to SConscript()."""
+
+ loc, glob = get_calling_namespaces()
+
+ retval = {}
+ try:
+ for export in exports:
+ if SCons.Util.is_Dict(export):
+ retval.update(export)
+ else:
+ try:
+ retval[export] = loc[export]
+ except KeyError:
+ retval[export] = glob[export]
+ except KeyError, x:
+ raise SCons.Errors.UserError, "Export of non-existent variable '%s'"%x
+
+ return retval
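+
+# Illustrative behaviour: assuming the calling SConscript defines 'env',
+# compute_exports(['env', {'CC': 'gcc'}]) looks 'env' up in that frame's
+# locals (falling back to its globals) and merges the literal dictionary
+# directly, returning roughly {'env': <Environment>, 'CC': 'gcc'}.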
+
+class Frame:
+ """A frame on the SConstruct/SConscript call stack"""
+ def __init__(self, fs, exports, sconscript):
+ self.globals = BuildDefaultGlobals()
+ self.retval = None
+ self.prev_dir = fs.getcwd()
+ self.exports = compute_exports(exports) # exports from the calling SConscript
+ # make sure the sconscript attr is a Node.
+ if isinstance(sconscript, SCons.Node.Node):
+ self.sconscript = sconscript
+ elif sconscript == '-':
+ self.sconscript = None
+ else:
+ self.sconscript = fs.File(str(sconscript))
+
+# the SConstruct/SConscript call stack:
+call_stack = []
+
+# For documentation on the methods in this file, see the scons man-page
+
+def Return(*vars, **kw):
+ retval = []
+ try:
+ fvars = SCons.Util.flatten(vars)
+ for var in fvars:
+ for v in string.split(var):
+ retval.append(call_stack[-1].globals[v])
+ except KeyError, x:
+ raise SCons.Errors.UserError, "Return of non-existent variable '%s'"%x
+
+ if len(retval) == 1:
+ call_stack[-1].retval = retval[0]
+ else:
+ call_stack[-1].retval = tuple(retval)
+
+ stop = kw.get('stop', True)
+
+ if stop:
+ raise SConscriptReturn
+
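+# Illustrative usage from an SConscript that was read with
+# SConscript('lib/SConscript', exports='env'):
+#
+#     Import('env')
+#     objs = env.Object('foo.c')
+#     Return('objs')
+#
+# Each argument is split on whitespace, so Return('objs headers') would
+# make the SConscript() call return the tuple (objs, headers).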
+
+stack_bottom = '% Stack boTTom %' # hard to define a variable w/this name :)
+
+def _SConscript(fs, *files, **kw):
+ top = fs.Top
+ sd = fs.SConstruct_dir.rdir()
+ exports = kw.get('exports', [])
+
+ # evaluate each SConscript file
+ results = []
+ for fn in files:
+ call_stack.append(Frame(fs, exports, fn))
+ old_sys_path = sys.path
+ try:
+ SCons.Script.sconscript_reading = SCons.Script.sconscript_reading + 1
+ if fn == "-":
+ exec sys.stdin in call_stack[-1].globals
+ else:
+ if isinstance(fn, SCons.Node.Node):
+ f = fn
+ else:
+ f = fs.File(str(fn))
+ _file_ = None
+
+ # Change directory to the top of the source
+ # tree to make sure the os's cwd and the cwd of
+ # fs match so we can open the SConscript.
+ fs.chdir(top, change_os_dir=1)
+ if f.rexists():
+ _file_ = open(f.rfile().get_abspath(), "r")
+ elif f.has_src_builder():
+ # The SConscript file apparently exists in a source
+ # code management system. Build it, but then clear
+ # the builder so that it doesn't get built *again*
+ # during the actual build phase.
+ f.build()
+ f.built()
+ f.builder_set(None)
+ if f.exists():
+ _file_ = open(f.get_abspath(), "r")
+ if _file_:
+ # Chdir to the SConscript directory. Use a path
+ # name relative to the SConstruct file so that if
+ # we're using the -f option, we're essentially
+ # creating a parallel SConscript directory structure
+ # in our local directory tree.
+ #
+ # XXX This is broken for multiple-repository cases
+ # where the SConstruct and SConscript files might be
+ # in different Repositories. For now, cross that
+ # bridge when someone comes to it.
+ try:
+ src_dir = kw['src_dir']
+ except KeyError:
+ ldir = fs.Dir(f.dir.get_path(sd))
+ else:
+ ldir = fs.Dir(src_dir)
+ if not ldir.is_under(f.dir):
+ # They specified a source directory, but
+ # it's above the SConscript directory.
+ # Do the sensible thing and just use the
+                            # SConscript directory.
+ ldir = fs.Dir(f.dir.get_path(sd))
+ try:
+ fs.chdir(ldir, change_os_dir=sconscript_chdir)
+ except OSError:
+ # There was no local directory, so we should be
+ # able to chdir to the Repository directory.
+ # Note that we do this directly, not through
+ # fs.chdir(), because we still need to
+ # interpret the stuff within the SConscript file
+ # relative to where we are logically.
+ fs.chdir(ldir, change_os_dir=0)
+ # TODO Not sure how to handle src_dir here
+ os.chdir(f.rfile().dir.get_abspath())
+
+ # Append the SConscript directory to the beginning
+ # of sys.path so Python modules in the SConscript
+ # directory can be easily imported.
+ sys.path = [ f.dir.get_abspath() ] + sys.path
+
+ # This is the magic line that actually reads up
+ # and executes the stuff in the SConscript file.
+ # The locals for this frame contain the special
+ # bottom-of-the-stack marker so that any
+ # exceptions that occur when processing this
+ # SConscript can base the printed frames at this
+ # level and not show SCons internals as well.
+ call_stack[-1].globals.update({stack_bottom:1})
+ old_file = call_stack[-1].globals.get('__file__')
+ try:
+ del call_stack[-1].globals['__file__']
+ except KeyError:
+ pass
+ try:
+ try:
+ exec _file_ in call_stack[-1].globals
+ except SConscriptReturn:
+ pass
+ finally:
+ if old_file is not None:
+                            call_stack[-1].globals.update({'__file__': old_file})
+ else:
+ SCons.Warnings.warn(SCons.Warnings.MissingSConscriptWarning,
+ "Ignoring missing SConscript '%s'" % f.path)
+
+ finally:
+ SCons.Script.sconscript_reading = SCons.Script.sconscript_reading - 1
+ sys.path = old_sys_path
+ frame = call_stack.pop()
+ try:
+ fs.chdir(frame.prev_dir, change_os_dir=sconscript_chdir)
+ except OSError:
+ # There was no local directory, so chdir to the
+ # Repository directory. Like above, we do this
+ # directly.
+ fs.chdir(frame.prev_dir, change_os_dir=0)
+ rdir = frame.prev_dir.rdir()
+ rdir._create() # Make sure there's a directory there.
+ try:
+ os.chdir(rdir.get_abspath())
+ except OSError, e:
+ # We still couldn't chdir there, so raise the error,
+ # but only if actions are being executed.
+ #
+ # If the -n option was used, the directory would *not*
+ # have been created and we should just carry on and
+ # let things muddle through. This isn't guaranteed
+ # to work if the SConscript files are reading things
+ # from disk (for example), but it should work well
+ # enough for most configurations.
+ if SCons.Action.execute_actions:
+ raise e
+
+ results.append(frame.retval)
+
+ # if we only have one script, don't return a tuple
+ if len(results) == 1:
+ return results[0]
+ else:
+ return tuple(results)
+
+def SConscript_exception(file=sys.stderr):
+ """Print an exception stack trace just for the SConscript file(s).
+ This will show users who have Python errors where the problem is,
+ without cluttering the output with all of the internal calls leading
+ up to where we exec the SConscript."""
+ exc_type, exc_value, exc_tb = sys.exc_info()
+ tb = exc_tb
+ while tb and not tb.tb_frame.f_locals.has_key(stack_bottom):
+ tb = tb.tb_next
+ if not tb:
+ # We did not find our exec statement, so this was actually a bug
+ # in SCons itself. Show the whole stack.
+ tb = exc_tb
+ stack = traceback.extract_tb(tb)
+ try:
+ type = exc_type.__name__
+ except AttributeError:
+ type = str(exc_type)
+ if type[:11] == "exceptions.":
+ type = type[11:]
+ file.write('%s: %s:\n' % (type, exc_value))
+ for fname, line, func, text in stack:
+ file.write(' File "%s", line %d:\n' % (fname, line))
+ file.write(' %s\n' % text)
+
+def annotate(node):
+ """Annotate a node with the stack frame describing the
+ SConscript file and line number that created it."""
+ tb = sys.exc_info()[2]
+ while tb and not tb.tb_frame.f_locals.has_key(stack_bottom):
+ tb = tb.tb_next
+ if not tb:
+ # We did not find any exec of an SConscript file: what?!
+ raise SCons.Errors.InternalError, "could not find SConscript stack frame"
+ node.creator = traceback.extract_stack(tb)[0]
+
+# The following line would cause each Node to be annotated using the
+# above function. Unfortunately, this is a *huge* performance hit, so
+# leave this disabled until we find a more efficient mechanism.
+#SCons.Node.Annotate = annotate
+
+class SConsEnvironment(SCons.Environment.Base):
+ """An Environment subclass that contains all of the methods that
+ are particular to the wrapper SCons interface and which aren't
+ (or shouldn't be) part of the build engine itself.
+
+ Note that not all of the methods of this class have corresponding
+    global functions; some of them are private methods.
+ """
+
+ #
+ # Private methods of an SConsEnvironment.
+ #
+ def _exceeds_version(self, major, minor, v_major, v_minor):
+ """Return 1 if 'major' and 'minor' are greater than the version
+ in 'v_major' and 'v_minor', and 0 otherwise."""
+ return (major > v_major or (major == v_major and minor > v_minor))
+
+ def _get_major_minor_revision(self, version_string):
+ """Split a version string into major, minor and (optionally)
+ revision parts.
+
+ This is complicated by the fact that a version string can be
+ something like 3.2b1."""
+ version = string.split(string.split(version_string, ' ')[0], '.')
+ v_major = int(version[0])
+ v_minor = int(re.match('\d+', version[1]).group())
+ if len(version) >= 3:
+ v_revision = int(re.match('\d+', version[2]).group())
+ else:
+ v_revision = 0
+ return v_major, v_minor, v_revision
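+
+    # Illustrative results from the helper above: '0.98.5' maps to
+    # (0, 98, 5), while a string such as '3.2b1' (trailing letters on the
+    # minor field, no revision field) maps to (3, 2, 0).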
+
+ def _get_SConscript_filenames(self, ls, kw):
+ """
+        Convert the parameters passed to SConscript() calls into a list
+        of files and export variables. If the parameters are invalid,
+        raises SCons.Errors.UserError. Returns a tuple (l, e) where l
+ is a list of SConscript filenames and e is a list of exports.
+ """
+ exports = []
+
+ if len(ls) == 0:
+ try:
+ dirs = kw["dirs"]
+ except KeyError:
+ raise SCons.Errors.UserError, \
+ "Invalid SConscript usage - no parameters"
+
+ if not SCons.Util.is_List(dirs):
+ dirs = [ dirs ]
+ dirs = map(str, dirs)
+
+ name = kw.get('name', 'SConscript')
+
+ files = map(lambda n, name = name: os.path.join(n, name), dirs)
+
+ elif len(ls) == 1:
+
+ files = ls[0]
+
+ elif len(ls) == 2:
+
+ files = ls[0]
+ exports = self.Split(ls[1])
+
+ else:
+
+ raise SCons.Errors.UserError, \
+ "Invalid SConscript() usage - too many arguments"
+
+ if not SCons.Util.is_List(files):
+ files = [ files ]
+
+ if kw.get('exports'):
+ exports.extend(self.Split(kw['exports']))
+
+ variant_dir = kw.get('variant_dir') or kw.get('build_dir')
+ if variant_dir:
+ if len(files) != 1:
+ raise SCons.Errors.UserError, \
+ "Invalid SConscript() usage - can only specify one SConscript with a variant_dir"
+ duplicate = kw.get('duplicate', 1)
+ src_dir = kw.get('src_dir')
+ if not src_dir:
+ src_dir, fname = os.path.split(str(files[0]))
+ files = [os.path.join(str(variant_dir), fname)]
+ else:
+ if not isinstance(src_dir, SCons.Node.Node):
+ src_dir = self.fs.Dir(src_dir)
+ fn = files[0]
+ if not isinstance(fn, SCons.Node.Node):
+ fn = self.fs.File(fn)
+ if fn.is_under(src_dir):
+ # Get path relative to the source directory.
+ fname = fn.get_path(src_dir)
+ files = [os.path.join(str(variant_dir), fname)]
+ else:
+ files = [fn.abspath]
+ kw['src_dir'] = variant_dir
+ self.fs.VariantDir(variant_dir, src_dir, duplicate)
+
+ return (files, exports)
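+
+    # A couple of illustrative mappings for the helper above (with default
+    # keyword arguments): SConscript('src/SConscript', exports='env')
+    # yields (['src/SConscript'], ['env']), and SConscript(dirs=['a', 'b'])
+    # joins each directory with the default name, giving
+    # (['a/SConscript', 'b/SConscript'], []) on POSIX-style paths.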
+
+ #
+ # Public methods of an SConsEnvironment. These get
+ # entry points in the global name space so they can be called
+ # as global functions.
+ #
+
+ def Configure(self, *args, **kw):
+ if not SCons.Script.sconscript_reading:
+ raise SCons.Errors.UserError, "Calling Configure from Builders is not supported."
+ kw['_depth'] = kw.get('_depth', 0) + 1
+ return apply(SCons.Environment.Base.Configure, (self,)+args, kw)
+
+ def Default(self, *targets):
+ SCons.Script._Set_Default_Targets(self, targets)
+
+ def EnsureSConsVersion(self, major, minor, revision=0):
+ """Exit abnormally if the SCons version is not late enough."""
+ scons_ver = self._get_major_minor_revision(SCons.__version__)
+ if scons_ver < (major, minor, revision):
+ if revision:
+ scons_ver_string = '%d.%d.%d' % (major, minor, revision)
+ else:
+ scons_ver_string = '%d.%d' % (major, minor)
+ print "SCons %s or greater required, but you have SCons %s" % \
+ (scons_ver_string, SCons.__version__)
+ sys.exit(2)
+
+ def EnsurePythonVersion(self, major, minor):
+ """Exit abnormally if the Python version is not late enough."""
+ try:
+ v_major, v_minor, v_micro, release, serial = sys.version_info
+ python_ver = (v_major, v_minor)
+ except AttributeError:
+ python_ver = self._get_major_minor_revision(sys.version)[:2]
+ if python_ver < (major, minor):
+ v = string.split(sys.version, " ", 1)[0]
+ print "Python %d.%d or greater required, but you have Python %s" %(major,minor,v)
+ sys.exit(2)
+
+ def Exit(self, value=0):
+ sys.exit(value)
+
+ def Export(self, *vars):
+ for var in vars:
+ global_exports.update(compute_exports(self.Split(var)))
+
+ def GetLaunchDir(self):
+ global launch_dir
+ return launch_dir
+
+ def GetOption(self, name):
+ name = self.subst(name)
+ return SCons.Script.Main.GetOption(name)
+
+ def Help(self, text):
+ text = self.subst(text, raw=1)
+ SCons.Script.HelpFunction(text)
+
+ def Import(self, *vars):
+ try:
+ frame = call_stack[-1]
+ globals = frame.globals
+ exports = frame.exports
+ for var in vars:
+ var = self.Split(var)
+ for v in var:
+ if v == '*':
+ globals.update(global_exports)
+ globals.update(exports)
+ else:
+ if exports.has_key(v):
+ globals[v] = exports[v]
+ else:
+ globals[v] = global_exports[v]
+ except KeyError,x:
+ raise SCons.Errors.UserError, "Import of non-existent variable '%s'"%x
+
+ def SConscript(self, *ls, **kw):
+ def subst_element(x, subst=self.subst):
+ if SCons.Util.is_List(x):
+ x = map(subst, x)
+ else:
+ x = subst(x)
+ return x
+ ls = map(subst_element, ls)
+ subst_kw = {}
+ for key, val in kw.items():
+ if SCons.Util.is_String(val):
+ val = self.subst(val)
+ elif SCons.Util.is_List(val):
+ result = []
+ for v in val:
+ if SCons.Util.is_String(v):
+ v = self.subst(v)
+ result.append(v)
+ val = result
+ subst_kw[key] = val
+
+ files, exports = self._get_SConscript_filenames(ls, subst_kw)
+ subst_kw['exports'] = exports
+ return apply(_SConscript, [self.fs,] + files, subst_kw)
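+
+    # Illustrative call from an SConstruct (the variable name BUILD is
+    # arbitrary; string arguments are substituted by the loop above before
+    # the files and exports are computed):
+    #
+    #     env = Environment(BUILD='build')
+    #     env.SConscript('src/SConscript', variant_dir='$BUILD/src',
+    #                    exports='env', duplicate=0)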
+
+ def SConscriptChdir(self, flag):
+ global sconscript_chdir
+ sconscript_chdir = flag
+
+ def SetOption(self, name, value):
+ name = self.subst(name)
+ SCons.Script.Main.SetOption(name, value)
+
+#
+#
+#
+SCons.Environment.Environment = SConsEnvironment
+
+def Configure(*args, **kw):
+ if not SCons.Script.sconscript_reading:
+ raise SCons.Errors.UserError, "Calling Configure from Builders is not supported."
+ kw['_depth'] = 1
+ return apply(SCons.SConf.SConf, args, kw)
+
+# It's very important that the DefaultEnvironmentCall() class stay in this
+# file, with the get_calling_namespaces() function, the compute_exports()
+# function, the Frame class and the SConsEnvironment.Export() method.
+# These things make up the calling stack leading up to the actual global
+# Export() or SConscript() call that the user issued. We want to allow
+# users to export local variables that they define, like so:
+#
+# def func():
+# x = 1
+# Export('x')
+#
+# To support this, the get_calling_namespaces() function assumes that
+# the *first* stack frame that's not from this file is the local frame
+# for the Export() or SConscript() call.
+
+_DefaultEnvironmentProxy = None
+
+def get_DefaultEnvironmentProxy():
+ global _DefaultEnvironmentProxy
+ if not _DefaultEnvironmentProxy:
+ default_env = SCons.Defaults.DefaultEnvironment()
+ _DefaultEnvironmentProxy = SCons.Environment.NoSubstitutionProxy(default_env)
+ return _DefaultEnvironmentProxy
+
+class DefaultEnvironmentCall:
+ """A class that implements "global function" calls of
+ Environment methods by fetching the specified method from the
+ DefaultEnvironment's class. Note that this uses an intermediate
+ proxy class instead of calling the DefaultEnvironment method
+ directly so that the proxy can override the subst() method and
+ thereby prevent expansion of construction variables (since from
+ the user's point of view this was called as a global function,
+ with no associated construction environment)."""
+ def __init__(self, method_name, subst=0):
+ self.method_name = method_name
+ if subst:
+ self.factory = SCons.Defaults.DefaultEnvironment
+ else:
+ self.factory = get_DefaultEnvironmentProxy
+ def __call__(self, *args, **kw):
+ env = self.factory()
+ method = getattr(env, self.method_name)
+ return apply(method, args, kw)
+
+
+def BuildDefaultGlobals():
+ """
+ Create a dictionary containing all the default globals for
+ SConstruct and SConscript files.
+ """
+
+ global GlobalDict
+ if GlobalDict is None:
+ GlobalDict = {}
+
+ import SCons.Script
+ d = SCons.Script.__dict__
+ def not_a_module(m, d=d, mtype=type(SCons.Script)):
+ return type(d[m]) != mtype
+ for m in filter(not_a_module, dir(SCons.Script)):
+ GlobalDict[m] = d[m]
+
+ return GlobalDict.copy()
diff --git a/tools/scons/scons-local-1.2.0/SCons/Script/__init__.py b/tools/scons/scons-local-1.2.0/SCons/Script/__init__.py
new file mode 100644
index 000000000..ad9999131
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Script/__init__.py
@@ -0,0 +1,408 @@
+"""SCons.Script
+
+This file implements the main() function used by the scons script.
+
+Architecturally, this *is* the scons script, and will likely only be
+called from the external "scons" wrapper. Consequently, anything here
+should not be, or be considered, part of the build engine. If it's
+something that we expect other software to want to use, it should go in
+some other module. If it's specific to the "scons" script invocation,
+it goes here.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Script/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+import time
+start_time = time.time()
+
+import os
+import string
+import sys
+import UserList
+
+# Special chicken-and-egg handling of the "--debug=memoizer" flag:
+#
+# SCons.Memoize contains a metaclass implementation that affects how
+# the other classes are instantiated. The Memoizer may add shim methods
+# to classes that have methods that cache computed values in order to
+# count and report the hits and misses.
+#
+# If we wait to enable the Memoization until after we've parsed the
+# command line options normally, it will be too late, because the Memoizer
+# will have already analyzed the classes that it's Memoizing and decided
+# to not add the shims. So we use a special-case, up-front check for
+# the "--debug=memoizer" flag and enable Memoizer before we import any
+# of the other modules that use it.
+
+_args = sys.argv + string.split(os.environ.get('SCONSFLAGS', ''))
+if "--debug=memoizer" in _args:
+ import SCons.Memoize
+ import SCons.Warnings
+ try:
+ SCons.Memoize.EnableMemoization()
+ except SCons.Warnings.Warning:
+ # Some warning was thrown (inability to --debug=memoizer on
+ # Python 1.5.2 because it doesn't have metaclasses). Arrange
+ # for it to be displayed or not after warnings are configured.
+ import Main
+ exc_type, exc_value, tb = sys.exc_info()
+ Main.delayed_warnings.append((exc_type, exc_value))
+del _args
+
+import SCons.Action
+import SCons.Builder
+import SCons.Environment
+import SCons.Node.FS
+import SCons.Options
+import SCons.Platform
+import SCons.Scanner
+import SCons.SConf
+import SCons.Subst
+import SCons.Tool
+import SCons.Util
+import SCons.Variables
+import SCons.Defaults
+
+import Main
+
+main = Main.main
+
+# The following are global class definitions and variables that used to
+# live directly in this module back before 0.96.90, when it contained
+# a lot of code. Some SConscript files in widely-distributed packages
+# (Blender is the specific example) actually reached into SCons.Script
+# directly to use some of these. Rather than break those SConscript
+# files, we're going to propagate these names into the SCons.Script
+# namespace here.
+#
+# Some of these are commented out because it's *really* unlikely anyone
+# used them, but we're going to leave the comment here to try to make
+# it obvious what to do if the situation arises.
+BuildTask = Main.BuildTask
+CleanTask = Main.CleanTask
+QuestionTask = Main.QuestionTask
+#PrintHelp = Main.PrintHelp
+#SConscriptSettableOptions = Main.SConscriptSettableOptions
+
+AddOption = Main.AddOption
+GetOption = Main.GetOption
+SetOption = Main.SetOption
+Progress = Main.Progress
+GetBuildFailures = Main.GetBuildFailures
+
+#keep_going_on_error = Main.keep_going_on_error
+#print_dtree = Main.print_dtree
+#print_explanations = Main.print_explanations
+#print_includes = Main.print_includes
+#print_objects = Main.print_objects
+#print_time = Main.print_time
+#print_tree = Main.print_tree
+#memory_stats = Main.memory_stats
+#ignore_errors = Main.ignore_errors
+#sconscript_time = Main.sconscript_time
+#command_time = Main.command_time
+#exit_status = Main.exit_status
+#profiling = Main.profiling
+#repositories = Main.repositories
+
+#
+import SConscript
+_SConscript = SConscript
+
+call_stack = _SConscript.call_stack
+
+#
+Action = SCons.Action.Action
+AddMethod = SCons.Util.AddMethod
+AllowSubstExceptions = SCons.Subst.SetAllowableExceptions
+Builder = SCons.Builder.Builder
+Configure = _SConscript.Configure
+Environment = SCons.Environment.Environment
+#OptParser = SCons.SConsOptions.OptParser
+FindPathDirs = SCons.Scanner.FindPathDirs
+Platform = SCons.Platform.Platform
+Return = _SConscript.Return
+Scanner = SCons.Scanner.Base
+Tool = SCons.Tool.Tool
+WhereIs = SCons.Util.WhereIs
+
+#
+BoolVariable = SCons.Variables.BoolVariable
+EnumVariable = SCons.Variables.EnumVariable
+ListVariable = SCons.Variables.ListVariable
+PackageVariable = SCons.Variables.PackageVariable
+PathVariable = SCons.Variables.PathVariable
+
+# Deprecated names that will go away some day.
+BoolOption = SCons.Options.BoolOption
+EnumOption = SCons.Options.EnumOption
+ListOption = SCons.Options.ListOption
+PackageOption = SCons.Options.PackageOption
+PathOption = SCons.Options.PathOption
+
+# Action factories.
+Chmod = SCons.Defaults.Chmod
+Copy = SCons.Defaults.Copy
+Delete = SCons.Defaults.Delete
+Mkdir = SCons.Defaults.Mkdir
+Move = SCons.Defaults.Move
+Touch = SCons.Defaults.Touch
+
+# Pre-made, public scanners.
+CScanner = SCons.Tool.CScanner
+DScanner = SCons.Tool.DScanner
+DirScanner = SCons.Defaults.DirScanner
+ProgramScanner = SCons.Tool.ProgramScanner
+SourceFileScanner = SCons.Tool.SourceFileScanner
+
+# Functions we might still convert to Environment methods.
+CScan = SCons.Defaults.CScan
+DefaultEnvironment = SCons.Defaults.DefaultEnvironment
+
+# Other variables we provide.
+class TargetList(UserList.UserList):
+ def _do_nothing(self, *args, **kw):
+ pass
+ def _add_Default(self, list):
+ self.extend(list)
+ def _clear(self):
+ del self[:]
+
+ARGUMENTS = {}
+ARGLIST = []
+BUILD_TARGETS = TargetList()
+COMMAND_LINE_TARGETS = []
+DEFAULT_TARGETS = []
+
+# BUILD_TARGETS can be modified in the SConscript files. If so, we
+# want to treat the modified BUILD_TARGETS list as if they specified
+# targets on the command line. To do that, though, we need to know if
+# BUILD_TARGETS was modified through "official" APIs or by hand. We do
+# this by updating two lists in parallel, the documented BUILD_TARGETS
+# list, above, and this internal _build_plus_default targets list which
+# should only have "official" API changes. Then Script/Main.py can
+# compare these two afterwards to figure out if the user added their
+# own targets to BUILD_TARGETS.
+_build_plus_default = TargetList()
+
+def _Add_Arguments(alist):
+ for arg in alist:
+ a, b = string.split(arg, '=', 1)
+ ARGUMENTS[a] = b
+ ARGLIST.append((a, b))
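+
+# Illustrative effect: command-line variable settings such as
+# "debug=1 prefix=/usr" end up as ARGUMENTS == {'debug': '1',
+# 'prefix': '/usr'} and ARGLIST == [('debug', '1'), ('prefix', '/usr')].
+# ARGLIST keeps every (name, value) pair in order; ARGUMENTS keeps only
+# the last value seen for a repeated name.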
+
+def _Add_Targets(tlist):
+ if tlist:
+ COMMAND_LINE_TARGETS.extend(tlist)
+ BUILD_TARGETS.extend(tlist)
+ BUILD_TARGETS._add_Default = BUILD_TARGETS._do_nothing
+ BUILD_TARGETS._clear = BUILD_TARGETS._do_nothing
+ _build_plus_default.extend(tlist)
+ _build_plus_default._add_Default = _build_plus_default._do_nothing
+ _build_plus_default._clear = _build_plus_default._do_nothing
+
+def _Set_Default_Targets_Has_Been_Called(d, fs):
+ return DEFAULT_TARGETS
+
+def _Set_Default_Targets_Has_Not_Been_Called(d, fs):
+ if d is None:
+ d = [fs.Dir('.')]
+ return d
+
+_Get_Default_Targets = _Set_Default_Targets_Has_Not_Been_Called
+
+def _Set_Default_Targets(env, tlist):
+ global DEFAULT_TARGETS
+ global _Get_Default_Targets
+ _Get_Default_Targets = _Set_Default_Targets_Has_Been_Called
+ for t in tlist:
+ if t is None:
+ # Delete the elements from the list in-place, don't
+ # reassign an empty list to DEFAULT_TARGETS, so that the
+ # variables will still point to the same object we point to.
+ del DEFAULT_TARGETS[:]
+ BUILD_TARGETS._clear()
+ _build_plus_default._clear()
+ elif isinstance(t, SCons.Node.Node):
+ DEFAULT_TARGETS.append(t)
+ BUILD_TARGETS._add_Default([t])
+ _build_plus_default._add_Default([t])
+ else:
+ nodes = env.arg2nodes(t, env.fs.Entry)
+ DEFAULT_TARGETS.extend(nodes)
+ BUILD_TARGETS._add_Default(nodes)
+ _build_plus_default._add_Default(nodes)
+
+#
+help_text = None
+
+def HelpFunction(text):
+ global help_text
+ if SCons.Script.help_text is None:
+ SCons.Script.help_text = text
+ else:
+ help_text = help_text + text
+
+#
+# Will be non-zero if we are reading an SConscript file.
+sconscript_reading = 0
+
+#
+def Variables(files=[], args=ARGUMENTS):
+ return SCons.Variables.Variables(files, args)
+
+def Options(files=[], args=ARGUMENTS):
+ return SCons.Options.Options(files, args)
+
+# The list of global functions to add to the SConscript name space
+# that end up calling corresponding methods or Builders in the
+# DefaultEnvironment().
+GlobalDefaultEnvironmentFunctions = [
+ # Methods from the SConsEnvironment class, above.
+ 'Default',
+ 'EnsurePythonVersion',
+ 'EnsureSConsVersion',
+ 'Exit',
+ 'Export',
+ 'GetLaunchDir',
+ 'Help',
+ 'Import',
+ #'SConscript', is handled separately, below.
+ 'SConscriptChdir',
+
+ # Methods from the Environment.Base class.
+ 'AddPostAction',
+ 'AddPreAction',
+ 'Alias',
+ 'AlwaysBuild',
+ 'BuildDir',
+ 'CacheDir',
+ 'Clean',
+ #The Command() method is handled separately, below.
+ 'Decider',
+ 'Depends',
+ 'Dir',
+ 'NoClean',
+ 'NoCache',
+ 'Entry',
+ 'Execute',
+ 'File',
+ 'FindFile',
+ 'FindInstalledFiles',
+ 'FindSourceFiles',
+ 'Flatten',
+ 'GetBuildPath',
+ 'Glob',
+ 'Ignore',
+ 'Install',
+ 'InstallAs',
+ 'Literal',
+ 'Local',
+ 'ParseDepends',
+ 'Precious',
+ 'Repository',
+ 'Requires',
+ 'SConsignFile',
+ 'SideEffect',
+ 'SourceCode',
+ 'SourceSignatures',
+ 'Split',
+ 'Tag',
+ 'TargetSignatures',
+ 'Value',
+ 'VariantDir',
+]
+
+GlobalDefaultBuilders = [
+ # Supported builders.
+ 'CFile',
+ 'CXXFile',
+ 'DVI',
+ 'Jar',
+ 'Java',
+ 'JavaH',
+ 'Library',
+ 'M4',
+ 'MSVSProject',
+ 'Object',
+ 'PCH',
+ 'PDF',
+ 'PostScript',
+ 'Program',
+ 'RES',
+ 'RMIC',
+ 'SharedLibrary',
+ 'SharedObject',
+ 'StaticLibrary',
+ 'StaticObject',
+ 'Tar',
+ 'TypeLibrary',
+ 'Zip',
+ 'Package',
+]
+
+for name in GlobalDefaultEnvironmentFunctions + GlobalDefaultBuilders:
+ exec "%s = _SConscript.DefaultEnvironmentCall(%s)" % (name, repr(name))
+del name
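+
+# The loop above effectively generates one global per name, e.g. (sketch):
+#
+#     Alias = _SConscript.DefaultEnvironmentCall('Alias')
+#     Program = _SConscript.DefaultEnvironmentCall('Program')
+#
+# so SConscript files can call Alias(...) or Program(...) without naming a
+# construction environment explicitly.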
+
+# There are a handful of variables that used to live in the
+# Script/SConscript.py module that some SConscript files out there were
+# accessing directly as SCons.Script.SConscript.*. The problem is that
+# "SConscript" in this namespace is no longer a module, it's a global
+# function call--or more precisely, an object that implements a global
+# function call through the default Environment. Nevertheless, we can
+# maintain backwards compatibility for SConscripts that were reaching in
+# this way by hanging some attributes off the "SConscript" object here.
+SConscript = _SConscript.DefaultEnvironmentCall('SConscript')
+
+# Make SConscript look enough like the module it used to be so
+# that pychecker doesn't barf.
+SConscript.__name__ = 'SConscript'
+
+SConscript.Arguments = ARGUMENTS
+SConscript.ArgList = ARGLIST
+SConscript.BuildTargets = BUILD_TARGETS
+SConscript.CommandLineTargets = COMMAND_LINE_TARGETS
+SConscript.DefaultTargets = DEFAULT_TARGETS
+
+# The global Command() function must be handled differently than the
+# global functions for other construction environment methods because
+# we want people to be able to use Actions that must expand $TARGET
+# and $SOURCE later, when (and if) the Action is invoked to build
+# the target(s). We do this with the subst=1 argument, which creates
+# a DefaultEnvironmentCall instance that wraps up a normal default
+# construction environment that performs variable substitution, not a
+# proxy that doesn't.
+#
+# There's a flaw here, though, because any other $-variables on a command
+# line will *also* be expanded, each to a null string, but that should
+# only be a problem in the unusual case where someone was passing a '$'
+# on a command line and *expected* the $ to get through to the shell
+# because they were calling Command() and not env.Command()... This is
+# unlikely enough that we're going to leave this as is and cross that
+# bridge if someone actually comes to it.
+Command = _SConscript.DefaultEnvironmentCall('Command', subst=1)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Sig.py b/tools/scons/scons-local-1.2.0/SCons/Sig.py
new file mode 100644
index 000000000..2e50308c5
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Sig.py
@@ -0,0 +1,57 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Sig.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """Place-holder for the old SCons.Sig module hierarchy
+
+This is no longer used, but code out there (such as the NSIS module on
+the SCons wiki) may try to import SCons.Sig. If so, we generate a warning
+that points them to the line that caused the import, and don't die.
+
+If someone actually tried to use the sub-modules or functions within
+the package (for example, SCons.Sig.MD5.signature()), then they'll still
+get an AttributeError, but at least they'll know where to start looking.
+"""
+
+import SCons.Util
+import SCons.Warnings
+
+msg = 'The SCons.Sig module no longer exists.\n' \
+ ' Remove the following "import SCons.Sig" line to eliminate this warning:'
+
+SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning, msg)
+
+default_calc = None
+default_module = None
+
+class MD5Null(SCons.Util.Null):
+ def __repr__(self):
+ return "MD5Null()"
+
+class TimeStampNull(SCons.Util.Null):
+ def __repr__(self):
+ return "TimeStampNull()"
+
+MD5 = MD5Null()
+TimeStamp = TimeStampNull()
diff --git a/tools/scons/scons-local-1.2.0/SCons/Subst.py b/tools/scons/scons-local-1.2.0/SCons/Subst.py
new file mode 100644
index 000000000..afebca43f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Subst.py
@@ -0,0 +1,884 @@
+"""SCons.Subst
+
+SCons string substitution.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Subst.py 3842 2008/12/20 22:59:52 scons"
+
+import re
+import string
+import types
+import UserList
+import UserString
+
+import SCons.Errors
+
+from SCons.Util import is_String, is_Sequence
+
+# Indexed by the SUBST_* constants below.
+_strconv = [SCons.Util.to_String_for_subst,
+ SCons.Util.to_String_for_subst,
+ SCons.Util.to_String_for_signature]
+
+
+
+AllowableExceptions = (IndexError, NameError)
+
+def SetAllowableExceptions(*excepts):
+ global AllowableExceptions
+ AllowableExceptions = filter(None, excepts)
+
+def raise_exception(exception, target, s):
+ name = exception.__class__.__name__
+ msg = "%s `%s' trying to evaluate `%s'" % (name, exception, s)
+ if target:
+ raise SCons.Errors.BuildError, (target[0], msg)
+ else:
+ raise SCons.Errors.UserError, msg
+
+
+
+class Literal:
+ """A wrapper for a string. If you use this object wrapped
+ around a string, then it will be interpreted as literal.
+ When passed to the command interpreter, all special
+ characters will be escaped."""
+ def __init__(self, lstr):
+ self.lstr = lstr
+
+ def __str__(self):
+ return self.lstr
+
+ def escape(self, escape_func):
+ return escape_func(self.lstr)
+
+ def for_signature(self):
+ return self.lstr
+
+ def is_literal(self):
+ return 1
+
+class SpecialAttrWrapper:
+ """This is a wrapper for what we call a 'Node special attribute.'
+ This is any of the attributes of a Node that we can reference from
+ Environment variable substitution, such as $TARGET.abspath or
+ $SOURCES[1].filebase. We implement the same methods as Literal
+ so we can handle special characters, plus a for_signature method,
+ such that we can return some canonical string during signature
+ calculation to avoid unnecessary rebuilds."""
+
+ def __init__(self, lstr, for_signature=None):
+ """The for_signature parameter, if supplied, will be the
+ canonical string we return from for_signature(). Else
+ we will simply return lstr."""
+ self.lstr = lstr
+ if for_signature:
+ self.forsig = for_signature
+ else:
+ self.forsig = lstr
+
+ def __str__(self):
+ return self.lstr
+
+ def escape(self, escape_func):
+ return escape_func(self.lstr)
+
+ def for_signature(self):
+ return self.forsig
+
+ def is_literal(self):
+ return 1
+
+def quote_spaces(arg):
+ """Generic function for putting double quotes around any string that
+ has white space in it."""
+ if ' ' in arg or '\t' in arg:
+ return '"%s"' % arg
+ else:
+ return str(arg)
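+
+# Illustrative, doctest-style behaviour of quote_spaces():
+#
+#     >>> quote_spaces('foo.c')
+#     'foo.c'
+#     >>> quote_spaces('my file.c')
+#     '"my file.c"'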
+
+class CmdStringHolder(UserString.UserString):
+ """This is a special class used to hold strings generated by
+ scons_subst() and scons_subst_list(). It defines a special method
+ escape(). When passed a function with an escape algorithm for a
+ particular platform, it will return the contained string with the
+ proper escape sequences inserted.
+ """
+ def __init__(self, cmd, literal=None):
+ UserString.UserString.__init__(self, cmd)
+ self.literal = literal
+
+ def is_literal(self):
+ return self.literal
+
+ def escape(self, escape_func, quote_func=quote_spaces):
+ """Escape the string with the supplied function. The
+ function is expected to take an arbitrary string, then
+ return it with all special characters escaped and ready
+ for passing to the command interpreter.
+
+ After calling this function, the next call to str() will
+ return the escaped string.
+ """
+
+ if self.is_literal():
+ return escape_func(self.data)
+ elif ' ' in self.data or '\t' in self.data:
+ return quote_func(self.data)
+ else:
+ return self.data
+
+def escape_list(list, escape_func):
+ """Escape a list of arguments by running the specified escape_func
+ on every object in the list that has an escape() method."""
+ def escape(obj, escape_func=escape_func):
+ try:
+ e = obj.escape
+ except AttributeError:
+ return obj
+ else:
+ return e(escape_func)
+ return map(escape, list)
+
+class NLWrapper:
+ """A wrapper class that delays turning a list of sources or targets
+ into a NodeList until it's needed. The specified function supplied
+ when the object is initialized is responsible for turning raw nodes
+ into proxies that implement the special attributes like .abspath,
+ .source, etc. This way, we avoid creating those proxies just
+ "in case" someone is going to use $TARGET or the like, and only
+ go through the trouble if we really have to.
+
+ In practice, this might be a wash performance-wise, but it's a little
+ cleaner conceptually...
+ """
+
+ def __init__(self, list, func):
+ self.list = list
+ self.func = func
+ def _return_nodelist(self):
+ return self.nodelist
+ def _gen_nodelist(self):
+ list = self.list
+ if list is None:
+ list = []
+ elif not is_Sequence(list):
+ list = [list]
+ # The map(self.func) call is what actually turns
+ # a list into appropriate proxies.
+ self.nodelist = SCons.Util.NodeList(map(self.func, list))
+ self._create_nodelist = self._return_nodelist
+ return self.nodelist
+ _create_nodelist = _gen_nodelist
+
+
+class Targets_or_Sources(UserList.UserList):
+ """A class that implements $TARGETS or $SOURCES expansions by in turn
+ wrapping a NLWrapper. This class handles the different methods used
+ to access the list, calling the NLWrapper to create proxies on demand.
+
+ Note that we subclass UserList.UserList purely so that the
+ is_Sequence() function will identify an object of this class as
+ a list during variable expansion. We're not really using any
+ UserList.UserList methods in practice.
+ """
+ def __init__(self, nl):
+ self.nl = nl
+ def __getattr__(self, attr):
+ nl = self.nl._create_nodelist()
+ return getattr(nl, attr)
+ def __getitem__(self, i):
+ nl = self.nl._create_nodelist()
+ return nl[i]
+ def __getslice__(self, i, j):
+ nl = self.nl._create_nodelist()
+ i = max(i, 0); j = max(j, 0)
+ return nl[i:j]
+ def __str__(self):
+ nl = self.nl._create_nodelist()
+ return str(nl)
+ def __repr__(self):
+ nl = self.nl._create_nodelist()
+ return repr(nl)
+
+class Target_or_Source:
+ """A class that implements $TARGET or $SOURCE expansions by in turn
+ wrapping a NLWrapper. This class handles the different methods used
+ to access an individual proxy Node, calling the NLWrapper to create
+ a proxy on demand.
+ """
+ def __init__(self, nl):
+ self.nl = nl
+ def __getattr__(self, attr):
+ nl = self.nl._create_nodelist()
+ try:
+ nl0 = nl[0]
+ except IndexError:
+ # If there is nothing in the list, then we have no attributes to
+ # pass through, so raise AttributeError for everything.
+ raise AttributeError, "NodeList has no attribute: %s" % attr
+ return getattr(nl0, attr)
+ def __str__(self):
+ nl = self.nl._create_nodelist()
+ if nl:
+ return str(nl[0])
+ return ''
+ def __repr__(self):
+ nl = self.nl._create_nodelist()
+ if nl:
+ return repr(nl[0])
+ return ''
+
+def subst_dict(target, source):
+ """Create a dictionary for substitution of special
+ construction variables.
+
+ This translates the following special arguments:
+
+ target - the target (object or array of objects),
+ used to generate the TARGET and TARGETS
+ construction variables
+
+ source - the source (object or array of objects),
+ used to generate the SOURCES and SOURCE
+ construction variables
+ """
+ dict = {}
+
+ if target:
+ def get_tgt_subst_proxy(thing):
+ try:
+ subst_proxy = thing.get_subst_proxy()
+ except AttributeError:
+ subst_proxy = thing # probably a string, just return it
+ return subst_proxy
+ tnl = NLWrapper(target, get_tgt_subst_proxy)
+ dict['TARGETS'] = Targets_or_Sources(tnl)
+ dict['TARGET'] = Target_or_Source(tnl)
+ else:
+ dict['TARGETS'] = None
+ dict['TARGET'] = None
+
+ if source:
+ def get_src_subst_proxy(node):
+ try:
+ rfile = node.rfile
+ except AttributeError:
+ pass
+ else:
+ node = rfile()
+ try:
+ return node.get_subst_proxy()
+ except AttributeError:
+ return node # probably a String, just return it
+ snl = NLWrapper(source, get_src_subst_proxy)
+ dict['SOURCES'] = Targets_or_Sources(snl)
+ dict['SOURCE'] = Target_or_Source(snl)
+ else:
+ dict['SOURCES'] = None
+ dict['SOURCE'] = None
+
+ return dict
+
+# Constants for the "mode" parameter to scons_subst_list() and
+# scons_subst(). SUBST_RAW gives the raw command line. SUBST_CMD
+# gives a command line suitable for passing to a shell. SUBST_SIG
+# gives a command line appropriate for calculating the signature
+# of a command line...if this changes, we should rebuild.
+SUBST_CMD = 0
+SUBST_RAW = 1
+SUBST_SIG = 2
+
+_rm = re.compile(r'\$[()]')
+_remove = re.compile(r'\$\([^\$]*(\$[^\)][^\$]*)*\$\)')
+
+# Indexed by the SUBST_* constants above.
+_regex_remove = [ _rm, None, _remove ]
+
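+# For illustration only: given the hypothetical string
+# "$CC $( -L$LIBDIR $) $SOURCES", the three modes differ roughly as follows:
+#
+#   SUBST_RAW   keeps the "$(" / "$)" markers and everything between them
+#   SUBST_CMD   strips just the "$(" and "$)" markers (the _rm pattern), so
+#               the bracketed flags still appear on the executed command line
+#   SUBST_SIG   strips the markers and everything between them (the _remove
+#               pattern), so bracketed flags do not affect the command
+#               signature and do not by themselves trigger rebuilds
+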
+def _rm_list(list):
+ #return [ l for l in list if not l in ('$(', '$)') ]
+ return filter(lambda l: not l in ('$(', '$)'), list)
+
+def _remove_list(list):
+ result = []
+ do_append = result.append
+ for l in list:
+ if l == '$(':
+ do_append = lambda x: None
+ elif l == '$)':
+ do_append = result.append
+ else:
+ do_append(l)
+ return result
+
+# Indexed by the SUBST_* constants above.
+_list_remove = [ _rm_list, None, _remove_list ]
+
+# Regular expressions for splitting strings and handling substitutions,
+# for use by the scons_subst() and scons_subst_list() functions:
+#
+# The first expression compiled matches all of the $-introduced tokens
+# that we need to process in some way, and is used for substitutions.
+# The expressions it matches are:
+#
+# "$$"
+# "$("
+# "$)"
+# "$variable" [must begin with alphabetic or underscore]
+# "${any stuff}"
+#
+# The second expression compiled is used for splitting strings into tokens
+# to be processed, and it matches all of the tokens listed above, plus
+# the following that affect how arguments do or don't get joined together:
+#
+# " " [white space]
+# "non-white-space" [without any dollar signs]
+# "$" [single dollar sign]
+#
+_dollar_exps_str = r'\$[\$\(\)]|\$[_a-zA-Z][\.\w]*|\${[^}]*}'
+_dollar_exps = re.compile(r'(%s)' % _dollar_exps_str)
+_separate_args = re.compile(r'(%s|\s+|[^\s\$]+|\$)' % _dollar_exps_str)
+
+# This regular expression is used to replace strings of multiple white
+# space characters in the string result from the scons_subst() function.
+_space_sep = re.compile(r'[\t ]+(?![^{]*})')
+
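+# For illustration only, here is how the two expressions tokenize a typical
+# command string (the values shown are what re.findall would return):
+#
+#   _dollar_exps.findall("echo $TARGET ${SOURCES[0]}")
+#       -> ['$TARGET', '${SOURCES[0]}']
+#   _separate_args.findall("echo $TARGET ${SOURCES[0]}")
+#       -> ['echo', ' ', '$TARGET', ' ', '${SOURCES[0]}']
+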
+def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
+ """Expand a string or list containing construction variable
+ substitutions.
+
+ This is the work-horse function for substitutions in file names
+ and the like. The companion scons_subst_list() function (below)
+ handles separating command lines into lists of arguments, so see
+ that function if that's what you're looking for.
+ """
+ if type(strSubst) == types.StringType and string.find(strSubst, '$') < 0:
+ return strSubst
+
+ class StringSubber:
+ """A class to construct the results of a scons_subst() call.
+
+ This binds a specific construction environment, mode, target and
+ source with two methods (substitute() and expand()) that handle
+ the expansion.
+ """
+ def __init__(self, env, mode, target, source, conv, gvars):
+ self.env = env
+ self.mode = mode
+ self.target = target
+ self.source = source
+ self.conv = conv
+ self.gvars = gvars
+
+ def expand(self, s, lvars):
+ """Expand a single "token" as necessary, returning an
+ appropriate string containing the expansion.
+
+ This handles expanding different types of things (strings,
+ lists, callables) appropriately. It calls the wrapper
+ substitute() method to re-expand things as necessary, so that
+ the results of expansions of side-by-side strings still get
+ re-evaluated separately, not smushed together.
+ """
+ if is_String(s):
+ try:
+ s0, s1 = s[:2]
+ except (IndexError, ValueError):
+ return s
+ if s0 != '$':
+ return s
+ if s1 == '$':
+ return '$'
+ elif s1 in '()':
+ return s
+ else:
+ key = s[1:]
+ if key[0] == '{' or string.find(key, '.') >= 0:
+ if key[0] == '{':
+ key = key[1:-1]
+ try:
+ s = eval(key, self.gvars, lvars)
+ except KeyboardInterrupt:
+ raise
+ except Exception, e:
+ if e.__class__ in AllowableExceptions:
+ return ''
+ raise_exception(e, self.target, s)
+ else:
+ if lvars.has_key(key):
+ s = lvars[key]
+ elif self.gvars.has_key(key):
+ s = self.gvars[key]
+ elif not NameError in AllowableExceptions:
+ raise_exception(NameError(key), self.target, s)
+ else:
+ return ''
+
+ # Before re-expanding the result, handle
+ # recursive expansion by copying the local
+ # variable dictionary and overwriting a null
+ # string for the value of the variable name
+ # we just expanded.
+ #
+ # This could potentially be optimized by only
+ # copying lvars when s contains more expansions,
+ # but lvars is usually supposed to be pretty
+ # small, and deeply nested variable expansions
+ # are probably more the exception than the norm,
+ # so it should be tolerable for now.
+ lv = lvars.copy()
+ var = string.split(key, '.')[0]
+ lv[var] = ''
+ return self.substitute(s, lv)
+ elif is_Sequence(s):
+ def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars):
+ return conv(substitute(l, lvars))
+ return map(func, s)
+ elif callable(s):
+ try:
+ s = s(target=self.target,
+ source=self.source,
+ env=self.env,
+ for_signature=(self.mode != SUBST_CMD))
+ except TypeError:
+ # This probably indicates that it's a callable
+ # object that doesn't match our calling arguments
+ # (like an Action).
+ if self.mode == SUBST_RAW:
+ return s
+ s = self.conv(s)
+ return self.substitute(s, lvars)
+ elif s is None:
+ return ''
+ else:
+ return s
+
+ def substitute(self, args, lvars):
+ """Substitute expansions in an argument or list of arguments.
+
+ This serves as a wrapper for splitting up a string into
+ separate tokens.
+ """
+ if is_String(args) and not isinstance(args, CmdStringHolder):
+ args = str(args) # In case it's a UserString.
+ try:
+ def sub_match(match, conv=self.conv, expand=self.expand, lvars=lvars):
+ return conv(expand(match.group(1), lvars))
+ result = _dollar_exps.sub(sub_match, args)
+ except TypeError:
+ # If the internal conversion routine doesn't return
+ # strings (it could be overridden to return Nodes, for
+ # example), then the 1.5.2 re module will throw this
+ # exception. Back off to a slower, general-purpose
+ # algorithm that works for all data types.
+ args = _separate_args.findall(args)
+ result = []
+ for a in args:
+ result.append(self.conv(self.expand(a, lvars)))
+ if len(result) == 1:
+ result = result[0]
+ else:
+ result = string.join(map(str, result), '')
+ return result
+ else:
+ return self.expand(args, lvars)
+
+ if conv is None:
+ conv = _strconv[mode]
+
+ # Doing this every time is a bit of a waste, since the Executor
+ # has typically already populated the OverrideEnvironment with
+ # $TARGET/$SOURCE variables. We're keeping this (for now), though,
+ # because it supports existing behavior that allows us to call
+ # an Action directly with an arbitrary target+source pair, which
+ # we use in Tool/tex.py to handle calling $BIBTEX when necessary.
+ # If we dropped that behavior (or found another way to cover it),
+ # we could get rid of this call completely and just rely on the
+ # Executor setting the variables.
+ d = subst_dict(target, source)
+ if d:
+ lvars = lvars.copy()
+ lvars.update(d)
+
+ # We're (most likely) going to eval() things. If Python doesn't
+ # find a __builtins__ value in the global dictionary used for eval(),
+ # it copies the current global values for you. Avoid this by
+ # setting it explicitly and then deleting, so we don't pollute the
+ # construction environment Dictionary(ies) that are typically used
+ # for expansion.
+ gvars['__builtins__'] = __builtins__
+
+ ss = StringSubber(env, mode, target, source, conv, gvars)
+ result = ss.substitute(strSubst, lvars)
+
+ try:
+ del gvars['__builtins__']
+ except KeyError:
+ pass
+
+ if is_String(result):
+ # Remove $(-$) pairs and any stuff in between,
+ # if that's appropriate.
+ remove = _regex_remove[mode]
+ if remove:
+ result = remove.sub('', result)
+ if mode != SUBST_RAW:
+ # Compress strings of white space characters into
+ # a single space.
+ result = string.strip(_space_sep.sub(' ', result))
+ elif is_Sequence(result):
+ remove = _list_remove[mode]
+ if remove:
+ result = remove(result)
+
+ return result
+
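+# A minimal usage sketch (hypothetical construction variables and File
+# nodes; shown as a comment, not executed):
+#
+#   gvars = {'CC': 'gcc', 'CCFLAGS': '-O2 $( -g $)'}
+#   scons_subst('$CC $CCFLAGS -c $SOURCES', env, mode=SUBST_CMD,
+#               source=[File('foo.c')], gvars=gvars)
+#       -> 'gcc -O2 -g -c foo.c'
+#   scons_subst('$CC $CCFLAGS -c $SOURCES', env, mode=SUBST_SIG,
+#               source=[File('foo.c')], gvars=gvars)
+#       -> 'gcc -O2 -c foo.c'
+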
+#Subst_List_Strings = {}
+
+def scons_subst_list(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
+ """Substitute construction variables in a string (or list or other
+ object) and separate the arguments into a command list.
+
+ The companion scons_subst() function (above) handles basic
+ substitutions within strings, so see that function instead
+ if that's what you're looking for.
+ """
+# try:
+# Subst_List_Strings[strSubst] = Subst_List_Strings[strSubst] + 1
+# except KeyError:
+# Subst_List_Strings[strSubst] = 1
+# import SCons.Debug
+# SCons.Debug.caller_trace(1)
+ class ListSubber(UserList.UserList):
+ """A class to construct the results of a scons_subst_list() call.
+
+ Like StringSubber, this class binds a specific construction
+ environment, mode, target and source with two methods
+ (substitute() and expand()) that handle the expansion.
+
+ In addition, however, this class is used to track the state of
+ the result(s) we're gathering so we can do the appropriate thing
+ whenever we have to append another word to the result--start a new
+ line, start a new word, append to the current word, etc. We do
+ this by setting the "append" attribute to the right method so
+ that our wrapper methods only need ever call ListSubber.append(),
+ and the rest of the object takes care of doing the right thing
+ internally.
+ """
+ def __init__(self, env, mode, target, source, conv, gvars):
+ UserList.UserList.__init__(self, [])
+ self.env = env
+ self.mode = mode
+ self.target = target
+ self.source = source
+ self.conv = conv
+ self.gvars = gvars
+
+ if self.mode == SUBST_RAW:
+ self.add_strip = lambda x, s=self: s.append(x)
+ else:
+ self.add_strip = lambda x, s=self: None
+ self.in_strip = None
+ self.next_line()
+
+ def expand(self, s, lvars, within_list):
+ """Expand a single "token" as necessary, appending the
+ expansion to the current result.
+
+ This handles expanding different types of things (strings,
+ lists, callables) appropriately. It calls the wrapper
+ substitute() method to re-expand things as necessary, so that
+ the results of expansions of side-by-side strings still get
+ re-evaluated separately, not smushed together.
+ """
+
+ if is_String(s):
+ try:
+ s0, s1 = s[:2]
+ except (IndexError, ValueError):
+ self.append(s)
+ return
+ if s0 != '$':
+ self.append(s)
+ return
+ if s1 == '$':
+ self.append('$')
+ elif s1 == '(':
+ self.open_strip('$(')
+ elif s1 == ')':
+ self.close_strip('$)')
+ else:
+ key = s[1:]
+ if key[0] == '{' or string.find(key, '.') >= 0:
+ if key[0] == '{':
+ key = key[1:-1]
+ try:
+ s = eval(key, self.gvars, lvars)
+ except KeyboardInterrupt:
+ raise
+ except Exception, e:
+ if e.__class__ in AllowableExceptions:
+ return
+ raise_exception(e, self.target, s)
+ else:
+ if lvars.has_key(key):
+ s = lvars[key]
+ elif self.gvars.has_key(key):
+ s = self.gvars[key]
+ elif not NameError in AllowableExceptions:
+ raise_exception(NameError(), self.target, s)
+ else:
+ return
+
+ # Before re-expanding the result, handle
+ # recursive expansion by copying the local
+ # variable dictionary and overwriting a null
+ # string for the value of the variable name
+ # we just expanded.
+ lv = lvars.copy()
+ var = string.split(key, '.')[0]
+ lv[var] = ''
+ self.substitute(s, lv, 0)
+ self.this_word()
+ elif is_Sequence(s):
+ for a in s:
+ self.substitute(a, lvars, 1)
+ self.next_word()
+ elif callable(s):
+ try:
+ s = s(target=self.target,
+ source=self.source,
+ env=self.env,
+ for_signature=(self.mode != SUBST_CMD))
+ except TypeError:
+ # This probably indicates that it's a callable
+ # object that doesn't match our calling arguments
+ # (like an Action).
+ if self.mode == SUBST_RAW:
+ self.append(s)
+ return
+ s = self.conv(s)
+ self.substitute(s, lvars, within_list)
+ elif s is None:
+ self.this_word()
+ else:
+ self.append(s)
+
+ def substitute(self, args, lvars, within_list):
+ """Substitute expansions in an argument or list of arguments.
+
+ This serves as a wrapper for splitting up a string into
+ separate tokens.
+ """
+
+ if is_String(args) and not isinstance(args, CmdStringHolder):
+ args = str(args) # In case it's a UserString.
+ args = _separate_args.findall(args)
+ for a in args:
+ if a[0] in ' \t\n\r\f\v':
+ if '\n' in a:
+ self.next_line()
+ elif within_list:
+ self.append(a)
+ else:
+ self.next_word()
+ else:
+ self.expand(a, lvars, within_list)
+ else:
+ self.expand(args, lvars, within_list)
+
+ def next_line(self):
+ """Arrange for the next word to start a new line. This
+ is like starting a new word, except that we have to append
+ another line to the result."""
+ UserList.UserList.append(self, [])
+ self.next_word()
+
+ def this_word(self):
+ """Arrange for the next word to append to the end of the
+ current last word in the result."""
+ self.append = self.add_to_current_word
+
+ def next_word(self):
+ """Arrange for the next word to start a new word."""
+ self.append = self.add_new_word
+
+ def add_to_current_word(self, x):
+ """Append the string x to the end of the current last word
+ in the result. If that is not possible, then just add
+ it as a new word. Make sure the entire concatenated string
+ inherits the object attributes of x (in particular, the
+ escape function) by wrapping it as CmdStringHolder."""
+
+ if not self.in_strip or self.mode != SUBST_SIG:
+ try:
+ current_word = self[-1][-1]
+ except IndexError:
+ self.add_new_word(x)
+ else:
+ # All right, this is a hack and it should probably
+ # be refactored out of existence in the future.
+ # The issue is that we want to smoosh words together
+ # and make one file name that gets escaped if
+ # we're expanding something like foo$EXTENSION,
+ # but we don't want to smoosh them together if
+ # it's something like >$TARGET, because then we'll
+ # treat the '>' like it's part of the file name.
+ # So for now, just hard-code looking for the special
+ # command-line redirection characters...
+ try:
+ last_char = str(current_word)[-1]
+ except IndexError:
+ last_char = '\0'
+ if last_char in '<>|':
+ self.add_new_word(x)
+ else:
+ y = current_word + x
+
+ # We used to treat a word appended to a literal
+ # as a literal itself, but this caused problems
+ # with interpreting quotes around space-separated
+ # targets on command lines. Removing this makes
+ # none of the "substantive" end-to-end tests fail,
+ # so we'll take this out but leave it commented
+ # for now in case there's a problem not covered
+ # by the test cases and we need to resurrect this.
+ #literal1 = self.literal(self[-1][-1])
+ #literal2 = self.literal(x)
+ y = self.conv(y)
+ if is_String(y):
+ #y = CmdStringHolder(y, literal1 or literal2)
+ y = CmdStringHolder(y, None)
+ self[-1][-1] = y
+
+ def add_new_word(self, x):
+ if not self.in_strip or self.mode != SUBST_SIG:
+ literal = self.literal(x)
+ x = self.conv(x)
+ if is_String(x):
+ x = CmdStringHolder(x, literal)
+ self[-1].append(x)
+ self.append = self.add_to_current_word
+
+ def literal(self, x):
+ try:
+ l = x.is_literal
+ except AttributeError:
+ return None
+ else:
+ return l()
+
+ def open_strip(self, x):
+ """Handle the "open strip" $( token."""
+ self.add_strip(x)
+ self.in_strip = 1
+
+ def close_strip(self, x):
+ """Handle the "close strip" $) token."""
+ self.add_strip(x)
+ self.in_strip = None
+
+ if conv is None:
+ conv = _strconv[mode]
+
+ # Doing this every time is a bit of a waste, since the Executor
+ # has typically already populated the OverrideEnvironment with
+ # $TARGET/$SOURCE variables. We're keeping this (for now), though,
+ # because it supports existing behavior that allows us to call
+ # an Action directly with an arbitrary target+source pair, which
+ # we use in Tool/tex.py to handle calling $BIBTEX when necessary.
+ # If we dropped that behavior (or found another way to cover it),
+ # we could get rid of this call completely and just rely on the
+ # Executor setting the variables.
+ d = subst_dict(target, source)
+ if d:
+ lvars = lvars.copy()
+ lvars.update(d)
+
+ # We're (most likely) going to eval() things. If Python doesn't
+ # find a __builtins__ value in the global dictionary used for eval(),
+ # it copies the current global values for you. Avoid this by
+ # setting it explicitly and then deleting, so we don't pollute the
+ # construction environment Dictionary(ies) that are typically used
+ # for expansion.
+ gvars['__builtins__'] = __builtins__
+
+ ls = ListSubber(env, mode, target, source, conv, gvars)
+ ls.substitute(strSubst, lvars, 0)
+
+ try:
+ del gvars['__builtins__']
+ except KeyError:
+ pass
+
+ return ls.data
+
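+# A minimal usage sketch (hypothetical File nodes; shown as a comment, not
+# executed).  Unlike scons_subst(), the result is a list of command lines,
+# each of which is itself a list of expanded words:
+#
+#   scons_subst_list('$CC -o $TARGET $SOURCES', env, mode=SUBST_CMD,
+#                    target=[File('prog')],
+#                    source=[File('a.c'), File('b.c')],
+#                    gvars={'CC': 'gcc'})
+#       -> [['gcc', '-o', 'prog', 'a.c', 'b.c']]
+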
+def scons_subst_once(strSubst, env, key):
+ """Perform single (non-recursive) substitution of a single
+ construction variable keyword.
+
+ This is used when setting a variable when copying or overriding values
+ in an Environment. We want to capture (expand) the old value before
+ we override it, so people can do things like:
+
+ env2 = env.Clone(CCFLAGS = '$CCFLAGS -g')
+
+ We do this with some straightforward, brute-force code here...
+ """
+ if type(strSubst) == types.StringType and string.find(strSubst, '$') < 0:
+ return strSubst
+
+ matchlist = ['$' + key, '${' + key + '}']
+ val = env.get(key, '')
+ def sub_match(match, val=val, matchlist=matchlist):
+ a = match.group(1)
+ if a in matchlist:
+ a = val
+ if is_Sequence(a):
+ return string.join(map(str, a))
+ else:
+ return str(a)
+
+ if is_Sequence(strSubst):
+ result = []
+ for arg in strSubst:
+ if is_String(arg):
+ if arg in matchlist:
+ arg = val
+ if is_Sequence(arg):
+ result.extend(arg)
+ else:
+ result.append(arg)
+ else:
+ result.append(_dollar_exps.sub(sub_match, arg))
+ else:
+ result.append(arg)
+ return result
+ elif is_String(strSubst):
+ return _dollar_exps.sub(sub_match, strSubst)
+ else:
+ return strSubst
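+
+# A minimal usage sketch (a plain dictionary stands in for a real
+# Environment; shown as a comment, not executed):
+#
+#   env = {'CCFLAGS': '-O2'}
+#   scons_subst_once('$CCFLAGS -g', env, 'CCFLAGS')    # -> '-O2 -g'
+#   scons_subst_once('$CPPPATH -g', env, 'CCFLAGS')    # -> '$CPPPATH -g'
+#
+# Only the named key is expanded, and only one level deep; every other
+# '$' reference is left untouched for later, normal expansion.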
diff --git a/tools/scons/scons-local-1.2.0/SCons/Taskmaster.py b/tools/scons/scons-local-1.2.0/SCons/Taskmaster.py
new file mode 100644
index 000000000..354fcca4f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Taskmaster.py
@@ -0,0 +1,985 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = """
+Generic Taskmaster module for the SCons build engine.
+
+This module contains the primary interface(s) between a wrapping user
+interface and the SCons build engine. There are two key classes here:
+
+ Taskmaster
+ This is the main engine for walking the dependency graph and
+ calling things to decide what does or doesn't need to be built.
+
+ Task
+ This is the base class for allowing a wrapping interface to
+ decide what does or doesn't actually need to be done. The
+ intention is for a wrapping interface to subclass this as
+ appropriate for different types of behavior it may need.
+
+ The canonical example is the SCons native Python interface,
+ which has Task subclasses that handle its specific behavior,
+ like printing "`foo' is up to date" when a top-level target
+ doesn't need to be built, and handling the -c option by removing
+ targets as its "build" action. There is also a separate subclass
+ for suppressing this output when the -q option is used.
+
+ The Taskmaster instantiates a Task object for each (set of)
+ target(s) that it decides need to be evaluated and/or built.
+"""
+
+__revision__ = "src/engine/SCons/Taskmaster.py 3842 2008/12/20 22:59:52 scons"
+
+from itertools import chain
+import operator
+import string
+import sys
+import traceback
+
+import SCons.Errors
+import SCons.Node
+
+StateString = SCons.Node.StateString
+NODE_NO_STATE = SCons.Node.no_state
+NODE_PENDING = SCons.Node.pending
+NODE_EXECUTING = SCons.Node.executing
+NODE_UP_TO_DATE = SCons.Node.up_to_date
+NODE_EXECUTED = SCons.Node.executed
+NODE_FAILED = SCons.Node.failed
+
+
+# A subsystem for recording stats about how different Nodes are handled by
+# the main Taskmaster loop. There's no external control here (no need for
+# a --debug= option); enable it by changing the value of CollectStats.
+
+CollectStats = None
+
+class Stats:
+ """
+ A simple class for holding statistics about the disposition of a
+ Node by the Taskmaster. If we're collecting statistics, each Node
+ processed by the Taskmaster gets one of these attached, in which case
+ the Taskmaster records its decision each time it processes the Node.
+ (Ideally, that's just once per Node.)
+ """
+ def __init__(self):
+ """
+ Instantiates a Taskmaster.Stats object, initializing all
+ appropriate counters to zero.
+ """
+ self.considered = 0
+ self.already_handled = 0
+ self.problem = 0
+ self.child_failed = 0
+ self.not_built = 0
+ self.side_effects = 0
+ self.build = 0
+
+StatsNodes = []
+
+fmt = "%(considered)3d "\
+ "%(already_handled)3d " \
+ "%(problem)3d " \
+ "%(child_failed)3d " \
+ "%(not_built)3d " \
+ "%(side_effects)3d " \
+ "%(build)3d "
+
+def dump_stats():
+ StatsNodes.sort(lambda a, b: cmp(str(a), str(b)))
+ for n in StatsNodes:
+ print (fmt % n.stats.__dict__) + str(n)
+
+
+
+class Task:
+ """
+ Default SCons build engine task.
+
+ This controls the interaction of the actual building of node
+ and the rest of the engine.
+
+ This is expected to handle all of the normally-customizable
+ aspects of controlling a build, so any given application
+ *should* be able to do what it wants by sub-classing this
+ class and overriding methods as appropriate. If an application
+ needs to customize something by sub-classing Taskmaster (or
+ some other build engine class), we should first try to migrate
+ that functionality into this class.
+
+ Note that it's generally a good idea for sub-classes to call
+ these methods explicitly to update state, etc., rather than
+ roll their own interaction with Taskmaster from scratch.
+ """
+ def __init__(self, tm, targets, top, node):
+ self.tm = tm
+ self.targets = targets
+ self.top = top
+ self.node = node
+ self.exc_clear()
+
+ def trace_message(self, method, node, description='node'):
+ fmt = '%-20s %s %s\n'
+ return fmt % (method + ':', description, self.tm.trace_node(node))
+
+ def display(self, message):
+ """
+ Hook to allow the calling interface to display a message.
+
+ This hook gets called as part of preparing a task for execution
+ (that is, a Node to be built). As part of figuring out what Node
+ should be built next, the actual target list may be altered,
+ along with a message describing the alteration. The calling
+ interface can subclass Task and provide a concrete implementation
+ of this method to see those messages.
+ """
+ pass
+
+ def prepare(self):
+ """
+ Called just before the task is executed.
+
+ This is mainly intended to give the target Nodes a chance to
+ unlink underlying files and make all necessary directories before
+ the Action is actually called to build the targets.
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.prepare()', self.node))
+
+ # Now that it's the appropriate time, give the TaskMaster a
+ # chance to raise any exceptions it encountered while preparing
+ # this task.
+ self.exception_raise()
+
+ if self.tm.message:
+ self.display(self.tm.message)
+ self.tm.message = None
+
+ # Let the targets take care of any necessary preparations.
+ # This includes verifying that all of the necessary sources
+ # and dependencies exist, removing the target file(s), etc.
+ #
+ # As of April 2008, the get_executor().prepare() method makes
+ # sure that all of the aggregate sources necessary to build this
+ # Task's target(s) exist in one up-front check. The individual
+ # target t.prepare() methods check that each target's explicit
+ # or implicit dependencies exists, and also initialize the
+ # .sconsign info.
+ self.targets[0].get_executor().prepare()
+ for t in self.targets:
+ t.prepare()
+ for s in t.side_effects:
+ s.prepare()
+
+ def get_target(self):
+ """Fetch the target being built or updated by this task.
+ """
+ return self.node
+
+ def needs_execute(self):
+ """
+ Called to determine whether the task's execute() method should
+ be run.
+
+ This method allows one to skip the somewhat costly execution
+ of the execute() method in a separate thread. For example,
+ that would be unnecessary for up-to-date targets.
+ """
+ return True
+
+ def execute(self):
+ """
+ Called to execute the task.
+
+ This method is called from multiple threads in a parallel build,
+ so only do thread safe stuff here. Do thread unsafe stuff in
+ prepare(), executed() or failed().
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.execute()', self.node))
+
+ try:
+ everything_was_cached = 1
+ for t in self.targets:
+ if not t.retrieve_from_cache():
+ everything_was_cached = 0
+ break
+ if not everything_was_cached:
+ self.targets[0].build()
+ except SystemExit:
+ exc_value = sys.exc_info()[1]
+ raise SCons.Errors.ExplicitExit(self.targets[0], exc_value.code)
+ except SCons.Errors.UserError:
+ raise
+ except SCons.Errors.BuildError:
+ raise
+ except Exception, e:
+ buildError = SCons.Errors.convert_to_BuildError(e)
+ buildError.node = self.targets[0]
+ buildError.exc_info = sys.exc_info()
+ raise buildError
+
+ def executed_without_callbacks(self):
+ """
+ Called when the task has been successfully executed
+ and the Taskmaster instance doesn't want to call
+ the Node's callback methods.
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.executed_without_callbacks()',
+ self.node))
+
+ for t in self.targets:
+ if t.get_state() == NODE_EXECUTING:
+ for side_effect in t.side_effects:
+ side_effect.set_state(NODE_NO_STATE)
+ t.set_state(NODE_EXECUTED)
+
+ def executed_with_callbacks(self):
+ """
+ Called when the task has been successfully executed and
+ the Taskmaster instance wants to call the Node's callback
+ methods.
+
+ This may have been a do-nothing operation (to preserve build
+ order), so we must check the node's state before deciding whether
+ it was "built", in which case we call the appropriate Node method.
+ In any event, we always call "visited()", which will handle any
+ post-visit actions that must take place regardless of whether
+ or not the target was an actual built target or a source Node.
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.executed_with_callbacks()',
+ self.node))
+
+ for t in self.targets:
+ if t.get_state() == NODE_EXECUTING:
+ for side_effect in t.side_effects:
+ side_effect.set_state(NODE_NO_STATE)
+ t.set_state(NODE_EXECUTED)
+ t.built()
+ t.visited()
+
+ executed = executed_with_callbacks
+
+ def failed(self):
+ """
+ Default action when a task fails: stop the build.
+
+ Note: Although this function is normally invoked on nodes in
+ the executing state, it might also be invoked on up-to-date
+ nodes when using Configure().
+ """
+ self.fail_stop()
+
+ def fail_stop(self):
+ """
+ Explicit stop-the-build failure.
+
+ This sets failure status on the target nodes and all of
+ their dependent parent nodes.
+
+ Note: Although this function is normally invoked on nodes in
+ the executing state, it might also be invoked on up-to-date
+ nodes when using Configure().
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.failed_stop()', self.node))
+
+ # Invoke will_not_build() to clean-up the pending children
+ # list.
+ self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
+
+ # Tell the taskmaster to not start any new tasks
+ self.tm.stop()
+
+ # We're stopping because of a build failure, but give the
+ # calling Task class a chance to postprocess() the top-level
+ # target under which the build failure occurred.
+ self.targets = [self.tm.current_top]
+ self.top = 1
+
+ def fail_continue(self):
+ """
+ Explicit continue-the-build failure.
+
+ This sets failure status on the target nodes and all of
+ their dependent parent nodes.
+
+ Note: Although this function is normally invoked on nodes in
+ the executing state, it might also be invoked on up-to-date
+ nodes when using Configure().
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.failed_continue()', self.node))
+
+ self.tm.will_not_build(self.targets, lambda n: n.set_state(NODE_FAILED))
+
+ def make_ready_all(self):
+ """
+ Marks all targets in a task ready for execution.
+
+ This is used when the interface needs every target Node to be
+ visited--the canonical example being the "scons -c" option.
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.make_ready_all()', self.node))
+
+ self.out_of_date = self.targets[:]
+ for t in self.targets:
+ t.disambiguate().set_state(NODE_EXECUTING)
+ for s in t.side_effects:
+ s.set_state(NODE_EXECUTING)
+
+ def make_ready_current(self):
+ """
+ Marks all targets in a task ready for execution if any target
+ is not current.
+
+ This is the default behavior for building only what's necessary.
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.make_ready_current()',
+ self.node))
+
+ self.out_of_date = []
+ needs_executing = False
+ for t in self.targets:
+ try:
+ t.disambiguate().make_ready()
+ is_up_to_date = not t.has_builder() or \
+ (not t.always_build and t.is_up_to_date())
+ except EnvironmentError, e:
+ raise SCons.Errors.BuildError(node=t, errstr=e.strerror, filename=e.filename)
+
+ if not is_up_to_date:
+ self.out_of_date.append(t)
+ needs_executing = True
+
+ if needs_executing:
+ for t in self.targets:
+ t.set_state(NODE_EXECUTING)
+ for s in t.side_effects:
+ s.set_state(NODE_EXECUTING)
+ else:
+ for t in self.targets:
+ # We must invoke visited() to ensure that the node
+ # information has been computed before allowing the
+ # parent nodes to execute. (That could occur in a
+ # parallel build...)
+ t.visited()
+ t.set_state(NODE_UP_TO_DATE)
+
+ make_ready = make_ready_current
+
+ def postprocess(self):
+ """
+ Post-processes a task after it's been executed.
+
+ This examines all the targets just built (or not, we don't care
+ if the build was successful, or even if there was no build
+ because everything was up-to-date) to see if they have any
+ waiting parent Nodes, or Nodes waiting on a common side effect,
+ that can be put back on the candidates list.
+ """
+ T = self.tm.trace
+ if T: T.write(self.trace_message('Task.postprocess()', self.node))
+
+ # We may have built multiple targets, some of which may have
+ # common parents waiting for this build. Count up how many
+ # targets each parent was waiting for so we can subtract the
+ # values later, and so we *don't* put waiting side-effect Nodes
+ # back on the candidates list if the Node is also a waiting
+ # parent.
+
+ targets = set(self.targets)
+
+ pending_children = self.tm.pending_children
+ parents = {}
+ for t in targets:
+ # A node can only be in the pending_children set if it has
+ # some waiting_parents.
+ if t.waiting_parents:
+ if T: T.write(self.trace_message('Task.postprocess()',
+ t,
+ 'removing'))
+ pending_children.discard(t)
+ for p in t.waiting_parents:
+ parents[p] = parents.get(p, 0) + 1
+
+ for t in targets:
+ for s in t.side_effects:
+ if s.get_state() == NODE_EXECUTING:
+ s.set_state(NODE_NO_STATE)
+ for p in s.waiting_parents:
+ parents[p] = parents.get(p, 0) + 1
+ for p in s.waiting_s_e:
+ if p.ref_count == 0:
+ self.tm.candidates.append(p)
+
+ for p, subtract in parents.items():
+ p.ref_count = p.ref_count - subtract
+ if T: T.write(self.trace_message('Task.postprocess()',
+ p,
+ 'adjusted parent ref count'))
+ if p.ref_count == 0:
+ self.tm.candidates.append(p)
+
+ for t in targets:
+ t.postprocess()
+
+ # Exception handling subsystem.
+ #
+ # Exceptions that occur while walking the DAG or examining Nodes
+ # must be raised, but must be raised at an appropriate time and in
+ # a controlled manner so we can, if necessary, recover gracefully,
+ # possibly write out signature information for Nodes we've updated,
+ # etc. This is done by having the Taskmaster tell us about the
+ # exception via exception_set(), and letting the Task raise it later
+ # through exception_raise(), typically when prepare() is called.
+
+ def exc_info(self):
+ """
+ Returns info about a recorded exception.
+ """
+ return self.exception
+
+ def exc_clear(self):
+ """
+ Clears any recorded exception.
+
+ This also changes the "exception_raise" attribute to point
+ to the appropriate do-nothing method.
+ """
+ self.exception = (None, None, None)
+ self.exception_raise = self._no_exception_to_raise
+
+ def exception_set(self, exception=None):
+ """
+ Records an exception to be raised at the appropriate time.
+
+ This also changes the "exception_raise" attribute to point
+ to the method that will, in fact, raise the recorded exception.
+ """
+ if not exception:
+ exception = sys.exc_info()
+ self.exception = exception
+ self.exception_raise = self._exception_raise
+
+ def _no_exception_to_raise(self):
+ pass
+
+ def _exception_raise(self):
+ """
+ Raises a pending exception that was recorded while getting a
+ Task ready for execution.
+ """
+ exc = self.exc_info()[:]
+ try:
+ exc_type, exc_value, exc_traceback = exc
+ except ValueError:
+ exc_type, exc_value = exc
+ exc_traceback = None
+ raise exc_type, exc_value, exc_traceback
+
+
+def find_cycle(stack, visited):
+ if stack[-1] in visited:
+ return None
+ visited.add(stack[-1])
+ for n in stack[-1].waiting_parents:
+ stack.append(n)
+ if stack[0] == stack[-1]:
+ return stack
+ if find_cycle(stack, visited):
+ return stack
+ stack.pop()
+ return None
+
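+# For illustration only: with three hypothetical nodes whose waiting_parents
+# links form A -> B -> C -> A, find_cycle([A], set()) returns the stack
+# [A, B, C, A], which Taskmaster.cleanup() below joins into a readable
+# "A -> B -> C -> A" cycle report.  For an acyclic graph the recursion
+# exhausts the waiting_parents chains and the function returns None.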
+
+class Taskmaster:
+ """
+ The Taskmaster for walking the dependency DAG.
+ """
+
+ def __init__(self, targets=[], tasker=Task, order=None, trace=None):
+ self.original_top = targets
+ self.top_targets_left = targets[:]
+ self.top_targets_left.reverse()
+ self.candidates = []
+ self.tasker = tasker
+ if not order:
+ order = lambda l: l
+ self.order = order
+ self.message = None
+ self.trace = trace
+ self.next_candidate = self.find_next_candidate
+ self.pending_children = set()
+
+ def find_next_candidate(self):
+ """
+ Returns the next candidate Node for (potential) evaluation.
+
+ The candidate list (really a stack) initially consists of all of
+ the top-level (command line) targets provided when the Taskmaster
+ was initialized. While we walk the DAG, visiting Nodes, all the
+ children that haven't finished processing get pushed on to the
+ candidate list. Each child can then be popped and examined in
+ turn for whether *their* children are all up-to-date, in which
+ case a Task will be created for their actual evaluation and
+ potential building.
+
+ Here is where we also allow candidate Nodes to alter the list of
+ Nodes that should be examined. This is used, for example, when
+ invoking SCons in a source directory. A source directory Node can
+ return its corresponding build directory Node, essentially saying,
+ "Hey, you really need to build this thing over here instead."
+ """
+ try:
+ return self.candidates.pop()
+ except IndexError:
+ pass
+ try:
+ node = self.top_targets_left.pop()
+ except IndexError:
+ return None
+ self.current_top = node
+ alt, message = node.alter_targets()
+ if alt:
+ self.message = message
+ self.candidates.append(node)
+ self.candidates.extend(self.order(alt))
+ node = self.candidates.pop()
+ return node
+
+ def no_next_candidate(self):
+ """
+ Stops Taskmaster processing by not returning a next candidate.
+
+ Note that we have to clean-up the Taskmaster candidate list
+ because the cycle detection depends on the fact that all nodes have
+ been processed somehow.
+ """
+ while self.candidates:
+ candidates = self.candidates
+ self.candidates = []
+ self.will_not_build(candidates)
+ return None
+
+ def _validate_pending_children(self):
+ """
+ Validate the content of the pending_children set. Assert if an
+ internal error is found.
+
+ This function is used strictly for debugging the taskmaster by
+ checking that no invariants are violated. It is not used in
+ normal operation.
+
+ The pending_children set is used to detect cycles in the
+ dependency graph. We call a "pending child" a child that is
+ found in the "pending" state when checking the dependencies of
+ its parent node.
+
+ A pending child can occur when the Taskmaster completes a loop
+ through a cycle. For example, let's imagine a graph made of
+ three nodes (A, B and C) forming a cycle. The evaluation starts
+ at node A. The Taskmaster first considers whether node A's
+ child B is up-to-date. Then, recursively, node B needs to
+ check whether node C is up-to-date. This leaves us with a
+ dependency graph looking like:
+
+ Next candidate \
+ \
+ Node A (Pending) --> Node B(Pending) --> Node C (NoState)
+ ^ |
+ | |
+ +-------------------------------------+
+
+ Now, when the Taskmaster examines Node C's child Node A,
+ it finds that Node A is in the "pending" state. Therefore,
+ Node A is a pending child of node C.
+
+ Pending children indicate that the Taskmaster has potentially
+ looped back through a cycle. We say potentially because it could
+ also occur when a DAG is evaluated in parallel. For example,
+ consider the following graph:
+
+
+ Node A (Pending) --> Node B(Pending) --> Node C (Pending) --> ...
+ | ^
+ | |
+ +----------> Node D (NoState) --------+
+ /
+ Next candidate /
+
+ The Taskmaster first evaluates the nodes A, B, and C and
+ starts building some children of node C. Assuming that the
+ maximum parallel level has not been reached, the Taskmaster
+ will examine Node D. It will find that Node C is a pending
+ child of Node D.
+
+ In summary, evaluating a graph with a cycle will always
+ involve a pending child at one point. A pending child might
+ indicate either a cycle or a diamond-shaped DAG. Only a
+ fraction of the nodes ends-up being a "pending child" of
+ another node. This keeps the pending_children set small in
+ practice.
+
+ We can differentiate between the two cases if we wait until
+ the end of the build. At this point, all the pending children
+ nodes due to a diamond-shaped DAG will have been properly
+ built (or will have failed to build). But, the pending
+ children involved in a cycle will still be in the pending
+ state.
+
+ The taskmaster removes nodes from the pending_children set as
+ soon as a pending child node moves out of the pending
+ state. This also helps to keep the pending_children set small.
+ """
+
+ for n in self.pending_children:
+ assert n.state in (NODE_PENDING, NODE_EXECUTING), \
+ (str(n), StateString[n.state])
+ assert len(n.waiting_parents) != 0, (str(n), len(n.waiting_parents))
+ for p in n.waiting_parents:
+ assert p.ref_count > 0, (str(n), str(p), p.ref_count)
+
+
+ def trace_message(self, message):
+ return 'Taskmaster: %s\n' % message
+
+ def trace_node(self, node):
+ return '<%-10s %-3s %s>' % (StateString[node.get_state()],
+ node.ref_count,
+ repr(str(node)))
+
+ def _find_next_ready_node(self):
+ """
+ Finds the next node that is ready to be built.
+
+ This is *the* main guts of the DAG walk. We loop through the
+ list of candidates, looking for something that has no un-built
+ children (i.e., that is a leaf Node or has dependencies that are
+ all leaf Nodes or up-to-date). Candidate Nodes are re-scanned
+ (both the target Node itself and its sources, which are always
+ scanned in the context of a given target) to discover implicit
+ dependencies. A Node that must wait for some children to be
+ built will be put back on the candidates list after the children
+ have finished building. A Node that has been put back on the
+ candidates list in this way may have itself (or its sources)
+ re-scanned, in order to handle generated header files (e.g.) and
+ the implicit dependencies therein.
+
+ Note that this method does not do any signature calculation or
+ up-to-date check itself. All of that is handled by the Task
+ class. This is purely concerned with the dependency graph walk.
+ """
+
+ self.ready_exc = None
+
+ T = self.trace
+ if T: T.write('\n' + self.trace_message('Looking for a node to evaluate'))
+
+ while 1:
+ node = self.next_candidate()
+ if node is None:
+ if T: T.write(self.trace_message('No candidate anymore.') + '\n')
+ return None
+
+ node = node.disambiguate()
+ state = node.get_state()
+
+ # For debugging only:
+ #
+ # try:
+ # self._validate_pending_children()
+ # except:
+ # self.ready_exc = sys.exc_info()
+ # return node
+
+ if CollectStats:
+ if not hasattr(node, 'stats'):
+ node.stats = Stats()
+ StatsNodes.append(node)
+ S = node.stats
+ S.considered = S.considered + 1
+ else:
+ S = None
+
+ if T: T.write(self.trace_message(' Considering node %s and its children:' % self.trace_node(node)))
+
+ if state == NODE_NO_STATE:
+ # Mark this node as being on the execution stack:
+ node.set_state(NODE_PENDING)
+ elif state > NODE_PENDING:
+ # Skip this node if it has already been evaluated:
+ if S: S.already_handled = S.already_handled + 1
+ if T: T.write(self.trace_message(' already handled (executed)'))
+ continue
+
+ try:
+ children = node.children()
+ except SystemExit:
+ exc_value = sys.exc_info()[1]
+ e = SCons.Errors.ExplicitExit(node, exc_value.code)
+ self.ready_exc = (SCons.Errors.ExplicitExit, e)
+ if T: T.write(self.trace_message(' SystemExit'))
+ return node
+ except Exception, e:
+ # We had a problem just trying to figure out the
+ # children (like a child couldn't be linked in to a
+ # VariantDir, or a Scanner threw something). Arrange to
+ # raise the exception when the Task is "executed."
+ self.ready_exc = sys.exc_info()
+ if S: S.problem = S.problem + 1
+ if T: T.write(self.trace_message(' exception %s while scanning children.\n' % e))
+ return node
+
+ children_not_visited = []
+ children_pending = set()
+ children_not_ready = []
+ children_failed = False
+
+ for child in chain(children,node.prerequisites):
+ childstate = child.get_state()
+
+ if T: T.write(self.trace_message(' ' + self.trace_node(child)))
+
+ if childstate == NODE_NO_STATE:
+ children_not_visited.append(child)
+ elif childstate == NODE_PENDING:
+ children_pending.add(child)
+ elif childstate == NODE_FAILED:
+ children_failed = True
+
+ if childstate <= NODE_EXECUTING:
+ children_not_ready.append(child)
+
+
+ # These nodes have not even been visited yet. Add
+ # them to the list so that on some next pass we can
+ # take a stab at evaluating them (or their children).
+ children_not_visited.reverse()
+ self.candidates.extend(self.order(children_not_visited))
+ #if T and children_not_visited:
+ # T.write(self.trace_message(' adding to candidates: %s' % map(str, children_not_visited)))
+ # T.write(self.trace_message(' candidates now: %s\n' % map(str, self.candidates)))
+
+ # Skip this node if any of its children have failed.
+ #
+ # This catches the case where we're descending a top-level
+ # target and one of our children failed while trying to be
+ # built by a *previous* descent of an earlier top-level
+ # target.
+ #
+ # It can also occur if a node is reused in multiple
+ # targets. One first descends through one of the
+ # targets; the next descent occurs through the other target.
+ #
+ # Note that we can only have failed children if the
+ # --keep-going flag was used, because without it the build
+ # will stop before diving into the other branch.
+ #
+ # Note that even if one of the children fails, we still
+ # added the other children to the list of candidate nodes
+ # to keep on building (--keep-going).
+ if children_failed:
+ node.set_state(NODE_FAILED)
+
+ if S: S.child_failed = S.child_failed + 1
+ if T: T.write(self.trace_message('****** %s\n' % self.trace_node(node)))
+ continue
+
+ if children_not_ready:
+ for child in children_not_ready:
+ # We're waiting on one or more derived targets
+ # that have not yet finished building.
+ if S: S.not_built = S.not_built + 1
+
+ # Add this node to the waiting parents lists of
+ # anything we're waiting on, with a reference
+ # count so we can be put back on the list for
+ # re-evaluation when they've all finished.
+ node.ref_count = node.ref_count + child.add_to_waiting_parents(node)
+ if T: T.write(self.trace_message(' adjusted ref count: %s, child %s' %
+ (self.trace_node(node), repr(str(child)))))
+
+ if T:
+ for pc in children_pending:
+ T.write(self.trace_message(' adding %s to the pending children set\n' %
+ self.trace_node(pc)))
+ self.pending_children = self.pending_children | children_pending
+
+ continue
+
+ # Skip this node if it has side-effects that are
+ # currently being built:
+ wait_side_effects = False
+ for se in node.side_effects:
+ if se.get_state() == NODE_EXECUTING:
+ se.add_to_waiting_s_e(node)
+ wait_side_effects = True
+
+ if wait_side_effects:
+ if S: S.side_effects = S.side_effects + 1
+ continue
+
+ # The default when we've gotten through all of the checks above:
+ # this node is ready to be built.
+ if S: S.build = S.build + 1
+ if T: T.write(self.trace_message('Evaluating %s\n' %
+ self.trace_node(node)))
+
+ # For debugging only:
+ #
+ # try:
+ # self._validate_pending_children()
+ # except:
+ # self.ready_exc = sys.exc_info()
+ # return node
+
+ return node
+
+ return None
+
+ def next_task(self):
+ """
+ Returns the next task to be executed.
+
+ This simply asks for the next Node to be evaluated, and then wraps
+ it in the specific Task subclass with which we were initialized.
+ """
+ node = self._find_next_ready_node()
+
+ if node is None:
+ return None
+
+ tlist = node.get_executor().targets
+
+ task = self.tasker(self, tlist, node in self.original_top, node)
+ try:
+ task.make_ready()
+ except:
+ # We had a problem just trying to get this task ready (like
+ # a child couldn't be linked in to a VariantDir when deciding
+ # whether this node is current). Arrange to raise the
+ # exception when the Task is "executed."
+ self.ready_exc = sys.exc_info()
+
+ if self.ready_exc:
+ task.exception_set(self.ready_exc)
+
+ self.ready_exc = None
+
+ return task
+
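+ # A typical driver loop (a sketch of how a wrapping interface, such as
+ # the job/thread layer, might use this class; names are illustrative):
+ #
+ #   while 1:
+ #       task = taskmaster.next_task()
+ #       if task is None:
+ #           break
+ #       try:
+ #           task.prepare()
+ #           if task.needs_execute():
+ #               task.execute()
+ #       except:
+ #           task.exception_set()
+ #           task.failed()
+ #       else:
+ #           task.executed()
+ #       task.postprocess()
+ #   taskmaster.cleanup()
+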
+ def will_not_build(self, nodes, node_func=lambda n: None):
+ """
+ Perform clean-up about nodes that will never be built. Invokes
+ a user defined function on all of these nodes (including all
+ of their parents).
+ """
+
+ T = self.trace
+
+ pending_children = self.pending_children
+
+ to_visit = set(nodes)
+ pending_children = pending_children - to_visit
+
+ if T:
+ for n in nodes:
+ T.write(self.trace_message(' removing node %s from the pending children set\n' %
+ self.trace_node(n)))
+ try:
+ while 1:
+ try:
+ node = to_visit.pop()
+ except AttributeError:
+ # Python 1.5.2
+ if len(to_visit):
+ node = to_visit[0]
+ to_visit.remove(node)
+ else:
+ break
+
+ node_func(node)
+
+ # Prune recursion by flushing the waiting children
+ # list immediately.
+ parents = node.waiting_parents
+ node.waiting_parents = set()
+
+ to_visit = to_visit | parents
+ pending_children = pending_children - parents
+
+ for p in parents:
+ p.ref_count = p.ref_count - 1
+ if T: T.write(self.trace_message(' removing parent %s from the pending children set\n' %
+ self.trace_node(p)))
+ except KeyError:
+ # The container to_visit has been emptied.
+ pass
+
+ # We have to stick the pending_children set back into the
+ # taskmaster because Python 1.5.2 compatibility does not
+ # allow us to use in-place updates.
+ self.pending_children = pending_children
+
+ def stop(self):
+ """
+ Stops the current build completely.
+ """
+ self.next_candidate = self.no_next_candidate
+
+ def cleanup(self):
+ """
+ Check for dependency cycles.
+ """
+ if not self.pending_children:
+ return
+
+ # TODO(1.5)
+ #nclist = [ (n, find_cycle([n], set())) for n in self.pending_children ]
+ nclist = map(lambda n: (n, find_cycle([n], set())), self.pending_children)
+
+ # TODO(1.5)
+ #genuine_cycles = [
+ # node for node, cycle in nclist
+ # if cycle or node.get_state() != NODE_EXECUTED
+ #]
+ genuine_cycles = filter(lambda t: t[1] or t[0].get_state() != NODE_EXECUTED, nclist)
+ if not genuine_cycles:
+ # All of the "cycles" found were single nodes in EXECUTED state,
+ # which is to say, they really weren't cycles. Just return.
+ return
+
+ desc = 'Found dependency cycle(s):\n'
+ for node, cycle in nclist:
+ if cycle:
+ desc = desc + " " + string.join(map(str, cycle), " -> ") + "\n"
+ else:
+ desc = desc + \
+ " Internal Error: no cycle found for node %s (%s) in state %s\n" % \
+ (node, repr(node), StateString[node.get_state()])
+
+ raise SCons.Errors.UserError, desc
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/386asm.py b/tools/scons/scons-local-1.2.0/SCons/Tool/386asm.py
new file mode 100644
index 000000000..fc5c50004
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/386asm.py
@@ -0,0 +1,55 @@
+"""SCons.Tool.386asm
+
+Tool specification for the 386ASM assembler for the Phar Lap ETS embedded
+operating system.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/386asm.py 3842 2008/12/20 22:59:52 scons"
+
+from SCons.Tool.PharLapCommon import addPharLapPaths
+import SCons.Util
+
+as_module = __import__('as', globals(), locals(), [])
+
+def generate(env):
+ """Add Builders and construction variables for the 386ASM assembler to an Environment."""
+ as_module.generate(env)
+
+ env['AS'] = '386asm'
+ env['ASFLAGS'] = SCons.Util.CLVar('')
+ env['ASPPFLAGS'] = '$ASFLAGS'
+ env['ASCOM'] = '$AS $ASFLAGS $SOURCES -o $TARGET'
+ env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $SOURCES -o $TARGET'
+
+ addPharLapPaths(env)
+
+def exists(env):
+ return env.Detect('386asm')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/BitKeeper.py b/tools/scons/scons-local-1.2.0/SCons/Tool/BitKeeper.py
new file mode 100644
index 000000000..15d1f0ad3
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/BitKeeper.py
@@ -0,0 +1,59 @@
+"""SCons.Tool.BitKeeper.py
+
+Tool-specific initialization for the BitKeeper source code control
+system.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/BitKeeper.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Builder
+import SCons.Util
+
+def generate(env):
+ """Add a Builder factory function and construction variables for
+ BitKeeper to an Environment."""
+
+ def BitKeeperFactory(env=env):
+ """ """
+ act = SCons.Action.Action("$BITKEEPERCOM", "$BITKEEPERCOMSTR")
+ return SCons.Builder.Builder(action = act, env = env)
+
+ #setattr(env, 'BitKeeper', BitKeeperFactory)
+ env.BitKeeper = BitKeeperFactory
+
+ env['BITKEEPER'] = 'bk'
+ env['BITKEEPERGET'] = '$BITKEEPER get'
+ env['BITKEEPERGETFLAGS'] = SCons.Util.CLVar('')
+ env['BITKEEPERCOM'] = '$BITKEEPERGET $BITKEEPERGETFLAGS $TARGET'
+
+def exists(env):
+ return env.Detect('bk')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/CVS.py b/tools/scons/scons-local-1.2.0/SCons/Tool/CVS.py
new file mode 100644
index 000000000..e1cc04d1e
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/CVS.py
@@ -0,0 +1,67 @@
+"""SCons.Tool.CVS.py
+
+Tool-specific initialization for CVS.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/CVS.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Builder
+import SCons.Util
+
+def generate(env):
+ """Add a Builder factory function and construction variables for
+ CVS to an Environment."""
+
+ def CVSFactory(repos, module='', env=env):
+ """ """
+ # fail if repos is not an absolute path name?
+ if module != '':
+ # Don't use os.path.join() because the name we fetch might
+ # be across a network and must use POSIX slashes as separators.
+ module = module + '/'
+ env['CVSCOM'] = '$CVS $CVSFLAGS co $CVSCOFLAGS -d ${TARGET.dir} $CVSMODULE${TARGET.posix}'
+ act = SCons.Action.Action('$CVSCOM', '$CVSCOMSTR')
+ return SCons.Builder.Builder(action = act,
+ env = env,
+ CVSREPOSITORY = repos,
+ CVSMODULE = module)
+
+ #setattr(env, 'CVS', CVSFactory)
+ env.CVS = CVSFactory
+
+ env['CVS'] = 'cvs'
+ env['CVSFLAGS'] = SCons.Util.CLVar('-d $CVSREPOSITORY')
+ env['CVSCOFLAGS'] = SCons.Util.CLVar('')
+ env['CVSCOM'] = '$CVS $CVSFLAGS co $CVSCOFLAGS ${TARGET.posix}'
+
+def exists(env):
+ return env.Detect('cvs')
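+
+# A minimal usage sketch (hypothetical repository path; shown as a comment,
+# not executed): the factory installed above is normally handed to
+# SourceCode() so that missing source files are fetched from CVS on demand:
+#
+#   env.SourceCode('.', env.CVS('/usr/local/CVSROOT'))
+#   env.SourceCode('src', env.CVS('/usr/local/CVSROOT', 'mymodule'))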
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/FortranCommon.py b/tools/scons/scons-local-1.2.0/SCons/Tool/FortranCommon.py
new file mode 100644
index 000000000..8d3204ff1
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/FortranCommon.py
@@ -0,0 +1,241 @@
+"""SCons.Tool.FortranCommon
+
+Stuff for processing Fortran, common to all fortran dialects.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/FortranCommon.py 3842 2008/12/20 22:59:52 scons"
+
+import re
+import string
+import os.path
+
+import SCons.Action
+import SCons.Defaults
+import SCons.Scanner.Fortran
+import SCons.Tool
+import SCons.Util
+
+def isfortran(env, source):
+ """Return 1 if any of code in source has fortran files in it, 0
+ otherwise."""
+ try:
+ fsuffixes = env['FORTRANSUFFIXES']
+ except KeyError:
+ # If no FORTRANSUFFIXES, no fortran tool, so there is no need to look
+ # for fortran sources.
+ return 0
+
+ if not source:
+ # Source might be None for unusual cases like SConf.
+ return 0
+ for s in source:
+ if s.sources:
+ ext = os.path.splitext(str(s.sources[0]))[1]
+ if ext in fsuffixes:
+ return 1
+ return 0
+
+def _fortranEmitter(target, source, env):
+ node = source[0].rfile()
+ if not node.exists() and not node.is_derived():
+ print "Could not locate " + str(node.name)
+ return ([], [])
+ mod_regex = """(?i)^\s*MODULE\s+(?!PROCEDURE)(\w+)"""
+ cre = re.compile(mod_regex,re.M)
+ # Retrieve the names of all modules defined in the source file
+ modules = cre.findall(node.get_contents())
+ # Remove duplicate module names from the list
+ modules = SCons.Util.unique(modules)
+ # Convert module name to a .mod filename
+ suffix = env.subst('$FORTRANMODSUFFIX', target=target, source=source)
+ moddir = env.subst('$FORTRANMODDIR', target=target, source=source)
+ modules = map(lambda x, s=suffix: string.lower(x) + s, modules)
+ for m in modules:
+ target.append(env.fs.File(m, moddir))
+ return (target, source)
+
+def FortranEmitter(target, source, env):
+ target, source = _fortranEmitter(target, source, env)
+ return SCons.Defaults.StaticObjectEmitter(target, source, env)
+
+def ShFortranEmitter(target, source, env):
+ target, source = _fortranEmitter(target, source, env)
+ return SCons.Defaults.SharedObjectEmitter(target, source, env)
+
+def ComputeFortranSuffixes(suffixes, ppsuffixes):
+ """suffixes are fortran source files, and ppsuffixes the ones to be
+ pre-processed. Both should be sequences, not strings."""
+ assert len(suffixes) > 0
+ s = suffixes[0]
+ sup = string.upper(s)
+ upper_suffixes = map(string.upper, suffixes)
+ if SCons.Util.case_sensitive_suffixes(s, sup):
+ ppsuffixes.extend(upper_suffixes)
+ else:
+ suffixes.extend(upper_suffixes)
+
+def CreateDialectActions(dialect):
+ """Create dialect specific actions."""
+ CompAction = SCons.Action.Action('$%sCOM ' % dialect, '$%sCOMSTR' % dialect)
+ CompPPAction = SCons.Action.Action('$%sPPCOM ' % dialect, '$%sPPCOMSTR' % dialect)
+ ShCompAction = SCons.Action.Action('$SH%sCOM ' % dialect, '$SH%sCOMSTR' % dialect)
+ ShCompPPAction = SCons.Action.Action('$SH%sPPCOM ' % dialect, '$SH%sPPCOMSTR' % dialect)
+
+ return CompAction, CompPPAction, ShCompAction, ShCompPPAction
+
+def DialectAddToEnv(env, dialect, suffixes, ppsuffixes, support_module = 0):
+ """Add dialect specific construction variables."""
+ ComputeFortranSuffixes(suffixes, ppsuffixes)
+
+ fscan = SCons.Scanner.Fortran.FortranScan("%sPATH" % dialect)
+
+ for suffix in suffixes + ppsuffixes:
+ SCons.Tool.SourceFileScanner.add_scanner(suffix, fscan)
+
+ env.AppendUnique(FORTRANSUFFIXES = suffixes + ppsuffixes)
+
+ compaction, compppaction, shcompaction, shcompppaction = \
+ CreateDialectActions(dialect)
+
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ for suffix in suffixes:
+ static_obj.add_action(suffix, compaction)
+ shared_obj.add_action(suffix, shcompaction)
+ static_obj.add_emitter(suffix, FortranEmitter)
+ shared_obj.add_emitter(suffix, ShFortranEmitter)
+
+ for suffix in ppsuffixes:
+ static_obj.add_action(suffix, compppaction)
+ shared_obj.add_action(suffix, shcompppaction)
+ static_obj.add_emitter(suffix, FortranEmitter)
+ shared_obj.add_emitter(suffix, ShFortranEmitter)
+
+ if not env.has_key('%sFLAGS' % dialect):
+ env['%sFLAGS' % dialect] = SCons.Util.CLVar('')
+
+ if not env.has_key('SH%sFLAGS' % dialect):
+ env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect)
+
+ # If a tool does not define fortran prefix/suffix for include path, use C ones
+ if not env.has_key('INC%sPREFIX' % dialect):
+ env['INC%sPREFIX' % dialect] = '$INCPREFIX'
+
+ if not env.has_key('INC%sSUFFIX' % dialect):
+ env['INC%sSUFFIX' % dialect] = '$INCSUFFIX'
+
+ env['_%sINCFLAGS' % dialect] = '$( ${_concat(INC%sPREFIX, %sPATH, INC%sSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)' % (dialect, dialect, dialect)
+
+ if support_module == 1:
+ env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
+ env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
+ env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
+ env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $_FORTRANMODFLAG $SOURCES' % (dialect, dialect, dialect)
+ else:
+ env['%sCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
+ env['%sPPCOM' % dialect] = '$%s -o $TARGET -c $%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
+ env['SH%sCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
+ env['SH%sPPCOM' % dialect] = '$SH%s -o $TARGET -c $SH%sFLAGS $CPPFLAGS $_CPPDEFFLAGS $_%sINCFLAGS $SOURCES' % (dialect, dialect, dialect)
+
+def add_fortran_to_env(env):
+ """Add Builders and construction variables for Fortran to an Environment."""
+ try:
+ FortranSuffixes = env['FORTRANFILESUFFIXES']
+ except KeyError:
+ FortranSuffixes = ['.f', '.for', '.ftn']
+
+ #print "Adding %s to fortran suffixes" % FortranSuffixes
+ try:
+ FortranPPSuffixes = env['FORTRANPPFILESUFFIXES']
+ except KeyError:
+ FortranPPSuffixes = ['.fpp', '.FPP']
+
+ DialectAddToEnv(env, "FORTRAN", FortranSuffixes,
+ FortranPPSuffixes, support_module = 1)
+
+ env['FORTRANMODPREFIX'] = '' # like $LIBPREFIX
+ env['FORTRANMODSUFFIX'] = '.mod' # like $LIBSUFFIX
+
+ env['FORTRANMODDIR'] = '' # where the compiler should place .mod files
+ env['FORTRANMODDIRPREFIX'] = '' # some prefix to $FORTRANMODDIR - similar to $INCPREFIX
+ env['FORTRANMODDIRSUFFIX'] = '' # some suffix to $FORTRANMODDIR - similar to $INCSUFFIX
+ env['_FORTRANMODFLAG'] = '$( ${_concat(FORTRANMODDIRPREFIX, FORTRANMODDIR, FORTRANMODDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
+
+def add_f77_to_env(env):
+ """Add Builders and construction variables for f77 to an Environment."""
+ try:
+ F77Suffixes = env['F77FILESUFFIXES']
+ except KeyError:
+ F77Suffixes = ['.f77']
+
+ #print "Adding %s to f77 suffixes" % F77Suffixes
+ try:
+ F77PPSuffixes = env['F77PPFILESUFFIXES']
+ except KeyError:
+ F77PPSuffixes = []
+
+ DialectAddToEnv(env, "F77", F77Suffixes, F77PPSuffixes)
+
+def add_f90_to_env(env):
+ """Add Builders and construction variables for f90 to an Environment."""
+ try:
+ F90Suffixes = env['F90FILESUFFIXES']
+ except KeyError:
+ F90Suffixes = ['.f90']
+
+ #print "Adding %s to f90 suffixes" % F90Suffixes
+ try:
+ F90PPSuffixes = env['F90PPFILESUFFIXES']
+ except KeyError:
+ F90PPSuffixes = []
+
+ DialectAddToEnv(env, "F90", F90Suffixes, F90PPSuffixes,
+ support_module = 1)
+
+def add_f95_to_env(env):
+ """Add Builders and construction variables for f95 to an Environment."""
+ try:
+ F95Suffixes = env['F95FILESUFFIXES']
+ except KeyError:
+ F95Suffixes = ['.f95']
+
+ #print "Adding %s to f95 suffixes" % F95Suffixes
+ try:
+ F95PPSuffixes = env['F95PPFILESUFFIXES']
+ except KeyError:
+ F95PPSuffixes = []
+
+ DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes,
+ support_module = 1)
+
+def add_all_to_env(env):
+ """Add builders and construction variables for all supported fortran
+ dialects."""
+ add_fortran_to_env(env)
+ add_f77_to_env(env)
+ add_f90_to_env(env)
+ add_f95_to_env(env)
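
[Editorial note, not part of the patch] Since DialectAddToEnv() derives every construction variable name from the dialect string, the effect of the add_*_to_env helpers is easiest to see by inspecting an Environment. A minimal sketch, assuming the scons-local tree is on sys.path:

import SCons.Environment
import SCons.Tool.FortranCommon as FortranCommon

env = SCons.Environment.Environment(tools=[])
FortranCommon.add_f90_to_env(env)

# The dialect name 'F90' is spliced into each variable, so the compile
# line is $F90COM and the shared flags default to '$F90FLAGS'.
print env['F90COM']
print env['SHF90FLAGS']
print env['FORTRANSUFFIXES']   # now includes '.f90' and '.F90'
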
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/JavaCommon.py b/tools/scons/scons-local-1.2.0/SCons/Tool/JavaCommon.py
new file mode 100644
index 000000000..12c31f37f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/JavaCommon.py
@@ -0,0 +1,317 @@
+"""SCons.Tool.JavaCommon
+
+Stuff for processing Java.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/JavaCommon.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+import re
+import string
+
+java_parsing = 1
+
+default_java_version = '1.4'
+
+if java_parsing:
+ # Parse Java files for class names.
+ #
+ # This is a really cool parser from Charles Crain
+ # that finds appropriate class names in Java source.
+
+ # A regular expression that will find, in a java file:
+ # newlines;
+ # double-backslashes;
+ # a single-line comment "//";
+ # single or double quotes preceded by a backslash;
+ # single quotes, double quotes, open or close braces, semi-colons,
+ # periods, open or close parentheses;
+ # floating-point numbers;
+ # any alphanumeric token (keyword, class name, specifier);
+ # any alphanumeric token surrounded by angle brackets (generics);
+ # the multi-line comment begin and end tokens /* and */;
+ # array declarations "[]".
+ _reToken = re.compile(r'(\n|\\\\|//|\\[\'"]|[\'"\{\}\;\.\(\)]|' +
+ r'\d*\.\d*|[A-Za-z_][\w\$\.]*|<[A-Za-z_]\w+>|' +
+ r'/\*|\*/|\[\])')
+
+ class OuterState:
+ """The initial state for parsing a Java file for classes,
+ interfaces, and anonymous inner classes."""
+ def __init__(self, version=default_java_version):
+
+ if not version in ('1.1', '1.2', '1.3','1.4', '1.5', '1.6'):
+ msg = "Java version %s not supported" % version
+ raise NotImplementedError, msg
+
+ self.version = version
+ self.listClasses = []
+ self.listOutputs = []
+ self.stackBrackets = []
+ self.brackets = 0
+ self.nextAnon = 1
+ self.localClasses = []
+ self.stackAnonClassBrackets = []
+ self.anonStacksStack = [[0]]
+ self.package = None
+
+ def trace(self):
+ pass
+
+ def __getClassState(self):
+ try:
+ return self.classState
+ except AttributeError:
+ ret = ClassState(self)
+ self.classState = ret
+ return ret
+
+ def __getPackageState(self):
+ try:
+ return self.packageState
+ except AttributeError:
+ ret = PackageState(self)
+ self.packageState = ret
+ return ret
+
+ def __getAnonClassState(self):
+ try:
+ return self.anonState
+ except AttributeError:
+ self.outer_state = self
+ ret = SkipState(1, AnonClassState(self))
+ self.anonState = ret
+ return ret
+
+ def __getSkipState(self):
+ try:
+ return self.skipState
+ except AttributeError:
+ ret = SkipState(1, self)
+ self.skipState = ret
+ return ret
+
+ def __getAnonStack(self):
+ return self.anonStacksStack[-1]
+
+ def openBracket(self):
+ self.brackets = self.brackets + 1
+
+ def closeBracket(self):
+ self.brackets = self.brackets - 1
+ if len(self.stackBrackets) and \
+ self.brackets == self.stackBrackets[-1]:
+ self.listOutputs.append(string.join(self.listClasses, '$'))
+ self.localClasses.pop()
+ self.listClasses.pop()
+ self.anonStacksStack.pop()
+ self.stackBrackets.pop()
+ if len(self.stackAnonClassBrackets) and \
+ self.brackets == self.stackAnonClassBrackets[-1]:
+ self.__getAnonStack().pop()
+ self.stackAnonClassBrackets.pop()
+
+ def parseToken(self, token):
+ if token[:2] == '//':
+ return IgnoreState('\n', self)
+ elif token == '/*':
+ return IgnoreState('*/', self)
+ elif token == '{':
+ self.openBracket()
+ elif token == '}':
+ self.closeBracket()
+ elif token in [ '"', "'" ]:
+ return IgnoreState(token, self)
+ elif token == "new":
+ # anonymous inner class
+ if len(self.listClasses) > 0:
+ return self.__getAnonClassState()
+ return self.__getSkipState() # Skip the class name
+ elif token in ['class', 'interface', 'enum']:
+ if len(self.listClasses) == 0:
+ self.nextAnon = 1
+ self.stackBrackets.append(self.brackets)
+ return self.__getClassState()
+ elif token == 'package':
+ return self.__getPackageState()
+ elif token == '.':
+ # Skip the attribute, it might be named "class", in which
+ # case we don't want to treat the following token as
+ # an inner class name...
+ return self.__getSkipState()
+ return self
+
+ def addAnonClass(self):
+ """Add an anonymous inner class"""
+ if self.version in ('1.1', '1.2', '1.3', '1.4'):
+ clazz = self.listClasses[0]
+ self.listOutputs.append('%s$%d' % (clazz, self.nextAnon))
+ elif self.version in ('1.5', '1.6'):
+ self.stackAnonClassBrackets.append(self.brackets)
+ className = []
+ className.extend(self.listClasses)
+ self.__getAnonStack()[-1] = self.__getAnonStack()[-1] + 1
+ for anon in self.__getAnonStack():
+ className.append(str(anon))
+ self.listOutputs.append(string.join(className, '$'))
+
+ self.nextAnon = self.nextAnon + 1
+ self.__getAnonStack().append(0)
+
+ def setPackage(self, package):
+ self.package = package
+
+ class AnonClassState:
+ """A state that looks for anonymous inner classes."""
+ def __init__(self, old_state):
+ # outer_state is always an instance of OuterState
+ self.outer_state = old_state.outer_state
+ self.old_state = old_state
+ self.brace_level = 0
+ def parseToken(self, token):
+ # This is an anonymous class if and only if the next
+ # non-whitespace token after the argument list is an opening
+ # brace. Everything inside the parentheses is parsed as
+ # normal Java code.
+ if token[:2] == '//':
+ return IgnoreState('\n', self)
+ elif token == '/*':
+ return IgnoreState('*/', self)
+ elif token == '\n':
+ return self
+ elif token[0] == '<' and token[-1] == '>':
+ return self
+ elif token == '(':
+ self.brace_level = self.brace_level + 1
+ return self
+ if self.brace_level > 0:
+ if token == 'new':
+ # look further for anonymous inner class
+ return SkipState(1, AnonClassState(self))
+ elif token in [ '"', "'" ]:
+ return IgnoreState(token, self)
+ elif token == ')':
+ self.brace_level = self.brace_level - 1
+ return self
+ if token == '{':
+ self.outer_state.addAnonClass()
+ return self.old_state.parseToken(token)
+
+ class SkipState:
+ """A state that will skip a specified number of tokens before
+ reverting to the previous state."""
+ def __init__(self, tokens_to_skip, old_state):
+ self.tokens_to_skip = tokens_to_skip
+ self.old_state = old_state
+ def parseToken(self, token):
+ self.tokens_to_skip = self.tokens_to_skip - 1
+ if self.tokens_to_skip < 1:
+ return self.old_state
+ return self
+
+ class ClassState:
+ """A state we go into when we hit a class or interface keyword."""
+ def __init__(self, outer_state):
+ # outer_state is always an instance of OuterState
+ self.outer_state = outer_state
+ def parseToken(self, token):
+ # the next non-whitespace token should be the name of the class
+ if token == '\n':
+ return self
+ # If that's an inner class which is declared in a method, it
+ # requires an index prepended to the class-name, e.g.
+ # 'Foo$1Inner' (Tigris Issue 2087)
+ if self.outer_state.localClasses and \
+ self.outer_state.stackBrackets[-1] > \
+ self.outer_state.stackBrackets[-2]+1:
+ locals = self.outer_state.localClasses[-1]
+ try:
+ idx = locals[token]
+ locals[token] = locals[token]+1
+ except KeyError:
+ locals[token] = 1
+ token = str(locals[token]) + token
+ self.outer_state.localClasses.append({})
+ self.outer_state.listClasses.append(token)
+ self.outer_state.anonStacksStack.append([0])
+ return self.outer_state
+
+ class IgnoreState:
+ """A state that will ignore all tokens until it gets to a
+ specified token."""
+ def __init__(self, ignore_until, old_state):
+ self.ignore_until = ignore_until
+ self.old_state = old_state
+ def parseToken(self, token):
+ if self.ignore_until == token:
+ return self.old_state
+ return self
+
+ class PackageState:
+ """The state we enter when we encounter the package keyword.
+ We assume the next token will be the package name."""
+ def __init__(self, outer_state):
+ # outer_state is always an instance of OuterState
+ self.outer_state = outer_state
+ def parseToken(self, token):
+ self.outer_state.setPackage(token)
+ return self.outer_state
+
+ def parse_java_file(fn, version=default_java_version):
+ return parse_java(open(fn, 'r').read(), version)
+
+ def parse_java(contents, version=default_java_version, trace=None):
+ """Parse a .java file and return a double of package directory,
+ plus a list of .class files that compiling that .java file will
+ produce"""
+ package = None
+ initial = OuterState(version)
+ currstate = initial
+ for token in _reToken.findall(contents):
+ # The regex produces a bunch of groups, but only one will
+ # have anything in it.
+ currstate = currstate.parseToken(token)
+ if trace: trace(token, currstate)
+ if initial.package:
+ package = string.replace(initial.package, '.', os.sep)
+ return (package, initial.listOutputs)
+
+else:
+ # Don't actually parse Java files for class names.
+ #
+ # We might make this a configurable option in the future if
+ # Java-file parsing takes too long (although it shouldn't relative
+ # to how long the Java compiler itself seems to take...).
+
+ def parse_java_file(fn):
+ """ "Parse" a .java file.
+
+ This actually just splits the file name, so the assumption here
+ is that the file name matches the public class name, and that
+ the path to the file is the same as the package name.
+ """
+ return os.path.split(fn)
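
[Editorial note, not part of the patch] The parser above is pure Python with no Environment dependency, so it can be exercised directly; a small sketch with a made-up source string (output shown for a POSIX os.sep):

import SCons.Tool.JavaCommon as JavaCommon

src = """
package com.example;
public class Outer {
    class Inner { }
}
"""
pkg, classes = JavaCommon.parse_java(src, '1.6')
print pkg        # 'com/example' -- package dots become os.sep
print classes    # ['Outer$Inner', 'Outer'] -- nested classes use '$' names
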
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/Perforce.py b/tools/scons/scons-local-1.2.0/SCons/Tool/Perforce.py
new file mode 100644
index 000000000..97049f646
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/Perforce.py
@@ -0,0 +1,98 @@
+"""SCons.Tool.Perforce.py
+
+Tool-specific initialization for Perforce Source Code Management system.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/Perforce.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+
+import SCons.Action
+import SCons.Builder
+import SCons.Node.FS
+import SCons.Util
+
+# This function should maybe be moved to SCons.Util?
+from SCons.Tool.PharLapCommon import addPathIfNotExists
+
+
+
+# Variables that we want to import from the base OS environment.
+_import_env = [ 'P4PORT', 'P4CLIENT', 'P4USER', 'USER', 'USERNAME', 'P4PASSWD',
+ 'P4CHARSET', 'P4LANGUAGE', 'SYSTEMROOT' ]
+
+PerforceAction = SCons.Action.Action('$P4COM', '$P4COMSTR')
+
+def generate(env):
+ """Add a Builder factory function and construction variables for
+ Perforce to an Environment."""
+
+ def PerforceFactory(env=env):
+ """ """
+ return SCons.Builder.Builder(action = PerforceAction, env = env)
+
+ #setattr(env, 'Perforce', PerforceFactory)
+ env.Perforce = PerforceFactory
+
+ env['P4'] = 'p4'
+ env['P4FLAGS'] = SCons.Util.CLVar('')
+ env['P4COM'] = '$P4 $P4FLAGS sync $TARGET'
+ try:
+ environ = env['ENV']
+ except KeyError:
+ environ = {}
+ env['ENV'] = environ
+
+ # Perforce seems to use the PWD environment variable rather than
+ # calling getcwd() for itself, which is odd. If no PWD variable
+ # is present, p4 WILL call getcwd, but this seems to cause problems
+ # with good ol' Windows's tilde-mangling for long file names.
+ environ['PWD'] = env.Dir('#').get_abspath()
+
+ for var in _import_env:
+ v = os.environ.get(var)
+ if v:
+ environ[var] = v
+
+ if SCons.Util.can_read_reg:
+ # If we can read the registry, add the path to Perforce to our environment.
+ try:
+ k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
+ 'Software\\Perforce\\environment')
+ val, tok = SCons.Util.RegQueryValueEx(k, 'P4INSTROOT')
+ addPathIfNotExists(environ, 'PATH', val)
+ except SCons.Util.RegError:
+ # Can't detect where Perforce is, hope the user has it set in the
+ # PATH.
+ pass
+
+def exists(env):
+ return env.Detect('p4')
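
[Editorial note, not part of the patch] Usage mirrors the other source-fetch tools in this patch; a minimal sketch, assuming 'p4' is installed and P4PORT/P4CLIENT are set in the calling environment (generate() copies them into $ENV):

import SCons.Environment

env = SCons.Environment.Environment(tools=['Perforce'])

# The factory takes no repository arguments; the workspace mapping comes
# entirely from the imported P4* variables. Missing sources are fetched
# with 'p4 sync <target>'.
env.SourceCode('.', env.Perforce())
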
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/PharLapCommon.py b/tools/scons/scons-local-1.2.0/SCons/Tool/PharLapCommon.py
new file mode 100644
index 000000000..76a566ab8
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/PharLapCommon.py
@@ -0,0 +1,132 @@
+"""SCons.Tool.PharLapCommon
+
+This module contains common code used by all Tools for the
+Phar Lap ETS tool chain. Right now, this is linkloc and
+386asm.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/PharLapCommon.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+import SCons.Errors
+import SCons.Util
+import re
+import string
+
+def getPharLapPath():
+ """Reads the registry to find the installed path of the Phar Lap ETS
+ development kit.
+
+ Raises UserError if no installed version of Phar Lap can
+ be found."""
+
+ if not SCons.Util.can_read_reg:
+ raise SCons.Errors.InternalError, "No Windows registry module was found"
+ try:
+ k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
+ 'SOFTWARE\\Pharlap\\ETS')
+ val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir')
+
+ # The following is a hack... there is (not surprisingly)
+ # an odd issue in the Phar Lap plug-in that inserts
+ # a bunch of junk data after the Phar Lap path in the
+ # registry.  We must trim it.
+ idx=val.find('\0')
+ if idx >= 0:
+ val = val[:idx]
+
+ return os.path.normpath(val)
+ except SCons.Util.RegError:
+ raise SCons.Errors.UserError, "Cannot find Phar Lap ETS path in the registry. Is it installed properly?"
+
+REGEX_ETS_VER = re.compile(r'#define\s+ETS_VER\s+([0-9]+)')
+
+def getPharLapVersion():
+ """Returns the version of the installed ETS Tool Suite as a
+ decimal number. This version comes from the ETS_VER #define in
+ the embkern.h header. For example, '#define ETS_VER 1010' (which
+ is what Phar Lap 10.1 defines) would cause this method to return
+ 1010. Phar Lap 9.1 does not have such a #define, but this method
+ will return 910 as a default.
+
+ Raises UserError if no installed version of Phar Lap can
+ be found."""
+
+ include_path = os.path.join(getPharLapPath(), os.path.normpath("include/embkern.h"))
+ if not os.path.exists(include_path):
+ raise SCons.Errors.UserError, "Cannot find embkern.h in ETS include directory.\nIs Phar Lap ETS installed properly?"
+ mo = REGEX_ETS_VER.search(open(include_path, 'r').read())
+ if mo:
+ return int(mo.group(1))
+ # Default return for Phar Lap 9.1
+ return 910
+
+def addPathIfNotExists(env_dict, key, path, sep=os.pathsep):
+ """This function will take 'key' out of the dictionary
+ 'env_dict', then add the path 'path' to that key if it is not
+ already there. This treats the value of env_dict[key] as if it
+ has a similar format to the PATH variable...a list of paths
+ separated by tokens. The 'path' will get added to the list if it
+ is not already there."""
+ try:
+ is_list = 1
+ paths = env_dict[key]
+ if not SCons.Util.is_List(env_dict[key]):
+ paths = string.split(paths, sep)
+ is_list = 0
+ if not os.path.normcase(path) in map(os.path.normcase, paths):
+ paths = [ path ] + paths
+ if is_list:
+ env_dict[key] = paths
+ else:
+ env_dict[key] = string.join(paths, sep)
+ except KeyError:
+ env_dict[key] = path
+
+def addPharLapPaths(env):
+ """This function adds the path to the Phar Lap binaries, includes,
+ and libraries, if they are not already there."""
+ ph_path = getPharLapPath()
+
+ try:
+ env_dict = env['ENV']
+ except KeyError:
+ env_dict = {}
+ env['ENV'] = env_dict
+ addPathIfNotExists(env_dict, 'PATH',
+ os.path.join(ph_path, 'bin'))
+ addPathIfNotExists(env_dict, 'INCLUDE',
+ os.path.join(ph_path, 'include'))
+ addPathIfNotExists(env_dict, 'LIB',
+ os.path.join(ph_path, 'lib'))
+ addPathIfNotExists(env_dict, 'LIB',
+ os.path.join(ph_path, os.path.normpath('lib/vclib')))
+
+ env['PHARLAP_PATH'] = getPharLapPath()
+ env['PHARLAP_VERSION'] = str(getPharLapVersion())
+
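
[Editorial note, not part of the patch] addPathIfNotExists() is the piece of this module that other tools (e.g. Perforce.py above) reuse; a small illustrative sketch with made-up POSIX paths, assuming the scons-local tree is importable:

import SCons.Tool.PharLapCommon as PharLapCommon

env_dict = {'INCLUDE': '/opt/ets/include'}

# Prepends only paths that are not already present (after normcase()).
PharLapCommon.addPathIfNotExists(env_dict, 'INCLUDE', '/usr/include')
PharLapCommon.addPathIfNotExists(env_dict, 'INCLUDE', '/opt/ets/include')
print env_dict['INCLUDE']   # '/usr/include:/opt/ets/include' with ':' pathsep
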
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/RCS.py b/tools/scons/scons-local-1.2.0/SCons/Tool/RCS.py
new file mode 100644
index 000000000..6d4706048
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/RCS.py
@@ -0,0 +1,58 @@
+"""SCons.Tool.RCS.py
+
+Tool-specific initialization for RCS.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/RCS.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Builder
+import SCons.Util
+
+def generate(env):
+ """Add a Builder factory function and construction variables for
+ RCS to an Environment."""
+
+ def RCSFactory(env=env):
+ """ """
+ act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
+ return SCons.Builder.Builder(action = act, env = env)
+
+ #setattr(env, 'RCS', RCSFactory)
+ env.RCS = RCSFactory
+
+ env['RCS'] = 'rcs'
+ env['RCS_CO'] = 'co'
+ env['RCS_COFLAGS'] = SCons.Util.CLVar('')
+ env['RCS_COCOM'] = '$RCS_CO $RCS_COFLAGS $TARGET'
+
+def exists(env):
+ return env.Detect('rcs')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/SCCS.py b/tools/scons/scons-local-1.2.0/SCons/Tool/SCCS.py
new file mode 100644
index 000000000..842db137f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/SCCS.py
@@ -0,0 +1,58 @@
+"""SCons.Tool.SCCS.py
+
+Tool-specific initialization for SCCS.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/SCCS.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Builder
+import SCons.Util
+
+def generate(env):
+ """Add a Builder factory function and construction variables for
+ SCCS to an Environment."""
+
+ def SCCSFactory(env=env):
+ """ """
+ act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
+ return SCons.Builder.Builder(action = act, env = env)
+
+ #setattr(env, 'SCCS', SCCSFactory)
+ env.SCCS = SCCSFactory
+
+ env['SCCS'] = 'sccs'
+ env['SCCSFLAGS'] = SCons.Util.CLVar('')
+ env['SCCSGETFLAGS'] = SCons.Util.CLVar('')
+ env['SCCSCOM'] = '$SCCS $SCCSFLAGS get $SCCSGETFLAGS $TARGET'
+
+def exists(env):
+ return env.Detect('sccs')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/Subversion.py b/tools/scons/scons-local-1.2.0/SCons/Tool/Subversion.py
new file mode 100644
index 000000000..a593c6abe
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/Subversion.py
@@ -0,0 +1,65 @@
+"""SCons.Tool.Subversion.py
+
+Tool-specific initialization for Subversion.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/Subversion.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+
+import SCons.Action
+import SCons.Builder
+import SCons.Util
+
+def generate(env):
+ """Add a Builder factory function and construction variables for
+ Subversion to an Environment."""
+
+ def SubversionFactory(repos, module='', env=env):
+ """ """
+ # fail if repos is not an absolute path name?
+ if module != '':
+ module = os.path.join(module, '')
+ act = SCons.Action.Action('$SVNCOM', '$SVNCOMSTR')
+ return SCons.Builder.Builder(action = act,
+ env = env,
+ SVNREPOSITORY = repos,
+ SVNMODULE = module)
+
+ #setattr(env, 'Subversion', SubversionFactory)
+ env.Subversion = SubversionFactory
+
+ env['SVN'] = 'svn'
+ env['SVNFLAGS'] = SCons.Util.CLVar('')
+ env['SVNCOM'] = '$SVN $SVNFLAGS cat $SVNREPOSITORY/$SVNMODULE$TARGET > $TARGET'
+
+def exists(env):
+ return env.Detect('svn')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/__init__.py b/tools/scons/scons-local-1.2.0/SCons/Tool/__init__.py
new file mode 100644
index 000000000..0b032820b
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/__init__.py
@@ -0,0 +1,667 @@
+"""SCons.Tool
+
+SCons tool selection.
+
+This looks for modules that define a callable object that can modify
+a construction environment as appropriate for a given tool (or tool
+chain).
+
+Note that because this subsystem just *selects* a callable that can
+modify a construction environment, it's possible for people to define
+their own "tool specification" in an arbitrary callable function. No
+one needs to use or tie in to this subsystem in order to roll their own
+tool definition.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+import imp
+import sys
+
+import SCons.Builder
+import SCons.Errors
+import SCons.Node.FS
+import SCons.Scanner
+import SCons.Scanner.C
+import SCons.Scanner.D
+import SCons.Scanner.LaTeX
+import SCons.Scanner.Prog
+
+DefaultToolpath=[]
+
+CScanner = SCons.Scanner.C.CScanner()
+DScanner = SCons.Scanner.D.DScanner()
+LaTeXScanner = SCons.Scanner.LaTeX.LaTeXScanner()
+PDFLaTeXScanner = SCons.Scanner.LaTeX.PDFLaTeXScanner()
+ProgramScanner = SCons.Scanner.Prog.ProgramScanner()
+SourceFileScanner = SCons.Scanner.Base({}, name='SourceFileScanner')
+
+CSuffixes = [".c", ".C", ".cxx", ".cpp", ".c++", ".cc",
+ ".h", ".H", ".hxx", ".hpp", ".hh",
+ ".F", ".fpp", ".FPP",
+ ".m", ".mm",
+ ".S", ".spp", ".SPP"]
+
+DSuffixes = ['.d']
+
+IDLSuffixes = [".idl", ".IDL"]
+
+LaTeXSuffixes = [".tex", ".ltx", ".latex"]
+
+for suffix in CSuffixes:
+ SourceFileScanner.add_scanner(suffix, CScanner)
+
+for suffix in DSuffixes:
+ SourceFileScanner.add_scanner(suffix, DScanner)
+
+# FIXME: what should be done here? Two scanners scan the same extensions,
+# but look for different files, e.g., "picture.eps" vs. "picture.pdf".
+# The builders for DVI and PDF explicitly reference their scanners
+# I think that means this is not needed???
+for suffix in LaTeXSuffixes:
+ SourceFileScanner.add_scanner(suffix, LaTeXScanner)
+ SourceFileScanner.add_scanner(suffix, PDFLaTeXScanner)
+
+class Tool:
+ def __init__(self, name, toolpath=[], **kw):
+ self.name = name
+ self.toolpath = toolpath + DefaultToolpath
+ # remember these so we can merge them into the call
+ self.init_kw = kw
+
+ module = self._tool_module()
+ self.generate = module.generate
+ self.exists = module.exists
+ if hasattr(module, 'options'):
+ self.options = module.options
+
+ def _tool_module(self):
+ # TODO: Interchange zipimport with normal initialization for better error reporting
+ oldpythonpath = sys.path
+ sys.path = self.toolpath + sys.path
+
+ try:
+ try:
+ file, path, desc = imp.find_module(self.name, self.toolpath)
+ try:
+ return imp.load_module(self.name, file, path, desc)
+ finally:
+ if file:
+ file.close()
+ except ImportError, e:
+ if str(e)!="No module named %s"%self.name:
+ raise SCons.Errors.EnvironmentError, e
+ try:
+ import zipimport
+ except ImportError:
+ pass
+ else:
+ for aPath in self.toolpath:
+ try:
+ importer = zipimport.zipimporter(aPath)
+ return importer.load_module(self.name)
+ except ImportError, e:
+ pass
+ finally:
+ sys.path = oldpythonpath
+
+ full_name = 'SCons.Tool.' + self.name
+ try:
+ return sys.modules[full_name]
+ except KeyError:
+ try:
+ smpath = sys.modules['SCons.Tool'].__path__
+ try:
+ file, path, desc = imp.find_module(self.name, smpath)
+ module = imp.load_module(full_name, file, path, desc)
+ setattr(SCons.Tool, self.name, module)
+ if file:
+ file.close()
+ return module
+ except ImportError, e:
+ if str(e)!="No module named %s"%self.name:
+ raise SCons.Errors.EnvironmentError, e
+ try:
+ import zipimport
+ importer = zipimport.zipimporter( sys.modules['SCons.Tool'].__path__[0] )
+ module = importer.load_module(full_name)
+ setattr(SCons.Tool, self.name, module)
+ return module
+ except ImportError, e:
+ m = "No tool named '%s': %s" % (self.name, e)
+ raise SCons.Errors.EnvironmentError, m
+ except ImportError, e:
+ m = "No tool named '%s': %s" % (self.name, e)
+ raise SCons.Errors.EnvironmentError, m
+
+ def __call__(self, env, *args, **kw):
+ if self.init_kw is not None:
+ # Merge call kws into init kws;
+ # but don't bash self.init_kw.
+ if kw is not None:
+ call_kw = kw
+ kw = self.init_kw.copy()
+ kw.update(call_kw)
+ else:
+ kw = self.init_kw
+ env.Append(TOOLS = [ self.name ])
+ if hasattr(self, 'options'):
+ import SCons.Variables
+ if not env.has_key('options'):
+ from SCons.Script import ARGUMENTS
+ env['options']=SCons.Variables.Variables(args=ARGUMENTS)
+ opts=env['options']
+
+ self.options(opts)
+ opts.Update(env)
+
+ apply(self.generate, ( env, ) + args, kw)
+
+ def __str__(self):
+ return self.name
+
+##########################################################################
+# Create common executable program / library / object builders
+
+def createProgBuilder(env):
+ """This is a utility function that creates the Program
+ Builder in an Environment if it is not there already.
+
+ If it is already there, we return the existing one.
+ """
+
+ try:
+ program = env['BUILDERS']['Program']
+ except KeyError:
+ import SCons.Defaults
+ program = SCons.Builder.Builder(action = SCons.Defaults.LinkAction,
+ emitter = '$PROGEMITTER',
+ prefix = '$PROGPREFIX',
+ suffix = '$PROGSUFFIX',
+ src_suffix = '$OBJSUFFIX',
+ src_builder = 'Object',
+ target_scanner = ProgramScanner)
+ env['BUILDERS']['Program'] = program
+
+ return program
+
+def createStaticLibBuilder(env):
+ """This is a utility function that creates the StaticLibrary
+ Builder in an Environment if it is not there already.
+
+ If it is already there, we return the existing one.
+ """
+
+ try:
+ static_lib = env['BUILDERS']['StaticLibrary']
+ except KeyError:
+ action_list = [ SCons.Action.Action("$ARCOM", "$ARCOMSTR") ]
+ if env.Detect('ranlib'):
+ ranlib_action = SCons.Action.Action("$RANLIBCOM", "$RANLIBCOMSTR")
+ action_list.append(ranlib_action)
+
+ static_lib = SCons.Builder.Builder(action = action_list,
+ emitter = '$LIBEMITTER',
+ prefix = '$LIBPREFIX',
+ suffix = '$LIBSUFFIX',
+ src_suffix = '$OBJSUFFIX',
+ src_builder = 'StaticObject')
+ env['BUILDERS']['StaticLibrary'] = static_lib
+ env['BUILDERS']['Library'] = static_lib
+
+ return static_lib
+
+def createSharedLibBuilder(env):
+ """This is a utility function that creates the SharedLibrary
+ Builder in an Environment if it is not there already.
+
+ If it is already there, we return the existing one.
+ """
+
+ try:
+ shared_lib = env['BUILDERS']['SharedLibrary']
+ except KeyError:
+ import SCons.Defaults
+ action_list = [ SCons.Defaults.SharedCheck,
+ SCons.Defaults.ShLinkAction ]
+ shared_lib = SCons.Builder.Builder(action = action_list,
+ emitter = "$SHLIBEMITTER",
+ prefix = '$SHLIBPREFIX',
+ suffix = '$SHLIBSUFFIX',
+ target_scanner = ProgramScanner,
+ src_suffix = '$SHOBJSUFFIX',
+ src_builder = 'SharedObject')
+ env['BUILDERS']['SharedLibrary'] = shared_lib
+
+ return shared_lib
+
+def createLoadableModuleBuilder(env):
+ """This is a utility function that creates the LoadableModule
+ Builder in an Environment if it is not there already.
+
+ If it is already there, we return the existing one.
+ """
+
+ try:
+ ld_module = env['BUILDERS']['LoadableModule']
+ except KeyError:
+ import SCons.Defaults
+ action_list = [ SCons.Defaults.SharedCheck,
+ SCons.Defaults.LdModuleLinkAction ]
+ ld_module = SCons.Builder.Builder(action = action_list,
+ emitter = "$SHLIBEMITTER",
+ prefix = '$LDMODULEPREFIX',
+ suffix = '$LDMODULESUFFIX',
+ target_scanner = ProgramScanner,
+ src_suffix = '$SHOBJSUFFIX',
+ src_builder = 'SharedObject')
+ env['BUILDERS']['LoadableModule'] = ld_module
+
+ return ld_module
+
+def createObjBuilders(env):
+ """This is a utility function that creates the StaticObject
+ and SharedObject Builders in an Environment if they
+ are not there already.
+
+ If they are there already, we return the existing ones.
+
+ This is a separate function because soooo many Tools
+ use this functionality.
+
+ The return is a 2-tuple of (StaticObject, SharedObject)
+ """
+
+
+ try:
+ static_obj = env['BUILDERS']['StaticObject']
+ except KeyError:
+ static_obj = SCons.Builder.Builder(action = {},
+ emitter = {},
+ prefix = '$OBJPREFIX',
+ suffix = '$OBJSUFFIX',
+ src_builder = ['CFile', 'CXXFile'],
+ source_scanner = SourceFileScanner,
+ single_source = 1)
+ env['BUILDERS']['StaticObject'] = static_obj
+ env['BUILDERS']['Object'] = static_obj
+
+ try:
+ shared_obj = env['BUILDERS']['SharedObject']
+ except KeyError:
+ shared_obj = SCons.Builder.Builder(action = {},
+ emitter = {},
+ prefix = '$SHOBJPREFIX',
+ suffix = '$SHOBJSUFFIX',
+ src_builder = ['CFile', 'CXXFile'],
+ source_scanner = SourceFileScanner,
+ single_source = 1)
+ env['BUILDERS']['SharedObject'] = shared_obj
+
+ return (static_obj, shared_obj)
+
+def createCFileBuilders(env):
+ """This is a utility function that creates the CFile/CXXFile
+ Builders in an Environment if they
+ are not there already.
+
+ If they are there already, we return the existing ones.
+
+ This is a separate function because soooo many Tools
+ use this functionality.
+
+ The return is a 2-tuple of (CFile, CXXFile)
+ """
+
+ try:
+ c_file = env['BUILDERS']['CFile']
+ except KeyError:
+ c_file = SCons.Builder.Builder(action = {},
+ emitter = {},
+ suffix = {None:'$CFILESUFFIX'})
+ env['BUILDERS']['CFile'] = c_file
+
+ env.SetDefault(CFILESUFFIX = '.c')
+
+ try:
+ cxx_file = env['BUILDERS']['CXXFile']
+ except KeyError:
+ cxx_file = SCons.Builder.Builder(action = {},
+ emitter = {},
+ suffix = {None:'$CXXFILESUFFIX'})
+ env['BUILDERS']['CXXFile'] = cxx_file
+ env.SetDefault(CXXFILESUFFIX = '.cc')
+
+ return (c_file, cxx_file)
+
+##########################################################################
+# Create common Java builders
+
+def CreateJarBuilder(env):
+ try:
+ java_jar = env['BUILDERS']['Jar']
+ except KeyError:
+ fs = SCons.Node.FS.get_default_fs()
+ jar_com = SCons.Action.Action('$JARCOM', '$JARCOMSTR')
+ java_jar = SCons.Builder.Builder(action = jar_com,
+ suffix = '$JARSUFFIX',
+ src_suffix = '$JAVACLASSSUFFIX',
+ src_builder = 'JavaClassFile',
+ source_factory = fs.Entry)
+ env['BUILDERS']['Jar'] = java_jar
+ return java_jar
+
+def CreateJavaHBuilder(env):
+ try:
+ java_javah = env['BUILDERS']['JavaH']
+ except KeyError:
+ fs = SCons.Node.FS.get_default_fs()
+ java_javah_com = SCons.Action.Action('$JAVAHCOM', '$JAVAHCOMSTR')
+ java_javah = SCons.Builder.Builder(action = java_javah_com,
+ src_suffix = '$JAVACLASSSUFFIX',
+ target_factory = fs.Entry,
+ source_factory = fs.File,
+ src_builder = 'JavaClassFile')
+ env['BUILDERS']['JavaH'] = java_javah
+ return java_javah
+
+def CreateJavaClassFileBuilder(env):
+ try:
+ java_class_file = env['BUILDERS']['JavaClassFile']
+ except KeyError:
+ fs = SCons.Node.FS.get_default_fs()
+ javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR')
+ java_class_file = SCons.Builder.Builder(action = javac_com,
+ emitter = {},
+ #suffix = '$JAVACLASSSUFFIX',
+ src_suffix = '$JAVASUFFIX',
+ src_builder = ['JavaFile'],
+ target_factory = fs.Entry,
+ source_factory = fs.File)
+ env['BUILDERS']['JavaClassFile'] = java_class_file
+ return java_class_file
+
+def CreateJavaClassDirBuilder(env):
+ try:
+ java_class_dir = env['BUILDERS']['JavaClassDir']
+ except KeyError:
+ fs = SCons.Node.FS.get_default_fs()
+ javac_com = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR')
+ java_class_dir = SCons.Builder.Builder(action = javac_com,
+ emitter = {},
+ target_factory = fs.Dir,
+ source_factory = fs.Dir)
+ env['BUILDERS']['JavaClassDir'] = java_class_dir
+ return java_class_dir
+
+def CreateJavaFileBuilder(env):
+ try:
+ java_file = env['BUILDERS']['JavaFile']
+ except KeyError:
+ java_file = SCons.Builder.Builder(action = {},
+ emitter = {},
+ suffix = {None:'$JAVASUFFIX'})
+ env['BUILDERS']['JavaFile'] = java_file
+ env['JAVASUFFIX'] = '.java'
+ return java_file
+
+class ToolInitializerMethod:
+ """
+ This is added to a construction environment in place of a
+ method(s) normally called for a Builder (env.Object, env.StaticObject,
+ etc.). When called, it has its associated ToolInitializer
+ object search the specified list of tools and apply the first
+ one that exists to the construction environment. It then calls
+ whatever builder was (presumably) added to the construction
+ environment in place of this particular instance.
+ """
+ def __init__(self, name, initializer):
+ """
+ Note: we store the tool name as __name__ so it can be used by
+ the class that attaches this to a construction environment.
+ """
+ self.__name__ = name
+ self.initializer = initializer
+
+ def get_builder(self, env):
+ """
+ Returns the appropriate real Builder for this method name
+ after having the associated ToolInitializer object apply
+ the appropriate Tool module.
+ """
+ builder = getattr(env, self.__name__)
+
+ self.initializer.apply_tools(env)
+
+ builder = getattr(env, self.__name__)
+ if builder is self:
+ # There was no Builder added, which means no valid Tool
+ # for this name was found (or possibly there's a mismatch
+ # between the name we were called by and the Builder name
+ # added by the Tool module).
+ return None
+
+ self.initializer.remove_methods(env)
+
+ return builder
+
+ def __call__(self, env, *args, **kw):
+ """
+ """
+ builder = self.get_builder(env)
+ if builder is None:
+ return [], []
+ return apply(builder, args, kw)
+
+class ToolInitializer:
+ """
+ A class for delayed initialization of Tools modules.
+
+ Instances of this class associate a list of Tool modules with
+ a list of Builder method names that will be added by those Tool
+ modules. As part of instantiating this object for a particular
+ construction environment, we also add the appropriate
+ ToolInitializerMethod objects for the various Builder methods
+ that we want to use to delay Tool searches until necessary.
+ """
+ def __init__(self, env, tools, names):
+ if not SCons.Util.is_List(tools):
+ tools = [tools]
+ if not SCons.Util.is_List(names):
+ names = [names]
+ self.env = env
+ self.tools = tools
+ self.names = names
+ self.methods = {}
+ for name in names:
+ method = ToolInitializerMethod(name, self)
+ self.methods[name] = method
+ env.AddMethod(method)
+
+ def remove_methods(self, env):
+ """
+ Removes the methods that were added by the tool initialization
+ so we no longer copy and re-bind them when the construction
+ environment gets cloned.
+ """
+ for method in self.methods.values():
+ env.RemoveMethod(method)
+
+ def apply_tools(self, env):
+ """
+ Searches the list of associated Tool modules for one that
+ exists, and applies that to the construction environment.
+ """
+ for t in self.tools:
+ tool = SCons.Tool.Tool(t)
+ if tool.exists(env):
+ env.Tool(tool)
+ return
+
+ # If we fall through here, there was no tool module found.
+ # This is where we can put an informative error message
+ # about the inability to find the tool. We'll start doing
+ # this as we cut over more pre-defined Builder+Tools to use
+ # the ToolInitializer class.
+
+def Initializers(env):
+ ToolInitializer(env, ['install'], ['_InternalInstall', '_InternalInstallAs'])
+ def Install(self, *args, **kw):
+ return apply(self._InternalInstall, args, kw)
+ def InstallAs(self, *args, **kw):
+ return apply(self._InternalInstallAs, args, kw)
+ env.AddMethod(Install)
+ env.AddMethod(InstallAs)
+
+def FindTool(tools, env):
+ for tool in tools:
+ t = Tool(tool)
+ if t.exists(env):
+ return tool
+ return None
+
+def FindAllTools(tools, env):
+ def ToolExists(tool, env=env):
+ return Tool(tool).exists(env)
+ return filter (ToolExists, tools)
+
+def tool_list(platform, env):
+
+ # XXX this logic about what tool to prefer on which platform
+ # should be moved into either the platform files or
+ # the tool files themselves.
+ # The search orders here are described in the man page. If you
+ # change these search orders, update the man page as well.
+ if str(platform) == 'win32':
+ "prefer Microsoft tools on Windows"
+ linkers = ['mslink', 'gnulink', 'ilink', 'linkloc', 'ilink32' ]
+ c_compilers = ['msvc', 'mingw', 'gcc', 'intelc', 'icl', 'icc', 'cc', 'bcc32' ]
+ cxx_compilers = ['msvc', 'intelc', 'icc', 'g++', 'c++', 'bcc32' ]
+ assemblers = ['masm', 'nasm', 'gas', '386asm' ]
+ fortran_compilers = ['gfortran', 'g77', 'ifl', 'cvf', 'f95', 'f90', 'fortran']
+ ars = ['mslib', 'ar', 'tlib']
+ elif str(platform) == 'os2':
+ "prefer IBM tools on OS/2"
+ linkers = ['ilink', 'gnulink', 'mslink']
+ c_compilers = ['icc', 'gcc', 'msvc', 'cc']
+ cxx_compilers = ['icc', 'g++', 'msvc', 'c++']
+ assemblers = ['nasm', 'masm', 'gas']
+ fortran_compilers = ['ifl', 'g77']
+ ars = ['ar', 'mslib']
+ elif str(platform) == 'irix':
+ "prefer MIPSPro on IRIX"
+ linkers = ['sgilink', 'gnulink']
+ c_compilers = ['sgicc', 'gcc', 'cc']
+ cxx_compilers = ['sgic++', 'g++', 'c++']
+ assemblers = ['as', 'gas']
+ fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran']
+ ars = ['sgiar']
+ elif str(platform) == 'sunos':
+ "prefer Forte tools on SunOS"
+ linkers = ['sunlink', 'gnulink']
+ c_compilers = ['suncc', 'gcc', 'cc']
+ cxx_compilers = ['sunc++', 'g++', 'c++']
+ assemblers = ['as', 'gas']
+ fortran_compilers = ['sunf95', 'sunf90', 'sunf77', 'f95', 'f90', 'f77',
+ 'gfortran', 'g77', 'fortran']
+ ars = ['sunar']
+ elif str(platform) == 'hpux':
+ "prefer aCC tools on HP-UX"
+ linkers = ['hplink', 'gnulink']
+ c_compilers = ['hpcc', 'gcc', 'cc']
+ cxx_compilers = ['hpc++', 'g++', 'c++']
+ assemblers = ['as', 'gas']
+ fortran_compilers = ['f95', 'f90', 'f77', 'g77', 'fortran']
+ ars = ['ar']
+ elif str(platform) == 'aix':
+ "prefer AIX Visual Age tools on AIX"
+ linkers = ['aixlink', 'gnulink']
+ c_compilers = ['aixcc', 'gcc', 'cc']
+ cxx_compilers = ['aixc++', 'g++', 'c++']
+ assemblers = ['as', 'gas']
+ fortran_compilers = ['f95', 'f90', 'aixf77', 'g77', 'fortran']
+ ars = ['ar']
+ elif str(platform) == 'darwin':
+ "prefer GNU tools on Mac OS X, except for some linkers and IBM tools"
+ linkers = ['applelink', 'gnulink']
+ c_compilers = ['gcc', 'cc']
+ cxx_compilers = ['g++', 'c++']
+ assemblers = ['as']
+ fortran_compilers = ['gfortran', 'f95', 'f90', 'g77']
+ ars = ['ar']
+ else:
+ "prefer GNU tools on all other platforms"
+ linkers = ['gnulink', 'mslink', 'ilink']
+ c_compilers = ['gcc', 'msvc', 'intelc', 'icc', 'cc']
+ cxx_compilers = ['g++', 'msvc', 'intelc', 'icc', 'c++']
+ assemblers = ['gas', 'nasm', 'masm']
+ fortran_compilers = ['gfortran', 'g77', 'ifort', 'ifl', 'f95', 'f90', 'f77']
+ ars = ['ar', 'mslib']
+
+ c_compiler = FindTool(c_compilers, env) or c_compilers[0]
+
+ # XXX this logic about what tool provides what should somehow be
+ # moved into the tool files themselves.
+ if c_compiler and c_compiler == 'mingw':
+ # MinGW contains a linker, C compiler, C++ compiler,
+ # Fortran compiler, archiver and assembler:
+ cxx_compiler = None
+ linker = None
+ assembler = None
+ fortran_compiler = None
+ ar = None
+ else:
+ # Don't use g++ if the C compiler has built-in C++ support:
+ if c_compiler in ('msvc', 'intelc', 'icc'):
+ cxx_compiler = None
+ else:
+ cxx_compiler = FindTool(cxx_compilers, env) or cxx_compilers[0]
+ linker = FindTool(linkers, env) or linkers[0]
+ assembler = FindTool(assemblers, env) or assemblers[0]
+ fortran_compiler = FindTool(fortran_compilers, env) or fortran_compilers[0]
+ ar = FindTool(ars, env) or ars[0]
+
+ other_tools = FindAllTools(['BitKeeper', 'CVS',
+ 'dmd',
+ 'filesystem',
+ 'dvipdf', 'dvips', 'gs',
+ 'jar', 'javac', 'javah',
+ 'latex', 'lex',
+ 'm4', 'midl', 'msvs',
+ 'pdflatex', 'pdftex', 'Perforce',
+ 'RCS', 'rmic', 'rpcgen',
+ 'SCCS',
+ # 'Subversion',
+ 'swig',
+ 'tar', 'tex',
+ 'yacc', 'zip', 'rpm', 'wix'],
+ env)
+
+ tools = ([linker, c_compiler, cxx_compiler,
+ fortran_compiler, assembler, ar]
+ + other_tools)
+
+ return filter(lambda x: x, tools)
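
[Editorial note, not part of the patch] For orientation, a Tool object is just a callable wrapper around a tool module's generate()/exists() pair; the sketch below is illustrative, assuming the scons-local tree is importable and gcc is on PATH, and shows the same steps Environment(tools=[...]) performs internally.

import SCons.Environment
import SCons.Tool

env = SCons.Environment.Environment(tools=[])
gcc = SCons.Tool.Tool('gcc')     # _tool_module() locates SCons/Tool/gcc.py
if gcc.exists(env):              # exists() -> the compiler can be detected
    gcc(env)                     # __call__ runs generate() and appends to $TOOLS
    print env['CC'], env['TOOLS']
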
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/aixc++.py b/tools/scons/scons-local-1.2.0/SCons/Tool/aixc++.py
new file mode 100644
index 000000000..5db91f768
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/aixc++.py
@@ -0,0 +1,76 @@
+"""SCons.Tool.aixc++
+
+Tool-specific initialization for IBM xlC / Visual Age C++ compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/aixc++.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+
+import SCons.Platform.aix
+
+cplusplus = __import__('c++', globals(), locals(), [])
+
+packages = ['vacpp.cmp.core', 'vacpp.cmp.batch', 'vacpp.cmp.C', 'ibmcxx.cmp']
+
+def get_xlc(env):
+ xlc = env.get('CXX', 'xlC')
+ xlc_r = env.get('SHCXX', 'xlC_r')
+ return SCons.Platform.aix.get_xlc(env, xlc, xlc_r, packages)
+
+def smart_cxxflags(source, target, env, for_signature):
+ build_dir = env.GetBuildPath()
+ if build_dir:
+ return '-qtempinc=' + os.path.join(build_dir, 'tempinc')
+ return ''
+
+def generate(env):
+ """Add Builders and construction variables for xlC / Visual Age
+ suite to an Environment."""
+ path, _cxx, _shcxx, version = get_xlc(env)
+ if path:
+ _cxx = os.path.join(path, _cxx)
+ _shcxx = os.path.join(path, _shcxx)
+
+ cplusplus.generate(env)
+
+ env['CXX'] = _cxx
+ env['SHCXX'] = _shcxx
+ env['CXXVERSION'] = version
+ env['SHOBJSUFFIX'] = '.pic.o'
+
+def exists(env):
+ path, _cxx, _shcxx, version = get_xlc(env)
+ if path and _cxx:
+ xlc = os.path.join(path, _cxx)
+ if os.path.exists(xlc):
+ return xlc
+ return None
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/aixcc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/aixcc.py
new file mode 100644
index 000000000..3c0b9d768
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/aixcc.py
@@ -0,0 +1,68 @@
+"""SCons.Tool.aixcc
+
+Tool-specific initialization for IBM xlc / Visual Age C compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/aixcc.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+
+import SCons.Platform.aix
+
+import cc
+
+packages = ['vac.C', 'ibmcxx.cmp']
+
+def get_xlc(env):
+ xlc = env.get('CC', 'xlc')
+ xlc_r = env.get('SHCC', 'xlc_r')
+ return SCons.Platform.aix.get_xlc(env, xlc, xlc_r, packages)
+
+def generate(env):
+ """Add Builders and construction variables for xlc / Visual Age
+ suite to an Environment."""
+ path, _cc, _shcc, version = get_xlc(env)
+ if path:
+ _cc = os.path.join(path, _cc)
+ _shcc = os.path.join(path, _shcc)
+
+ cc.generate(env)
+
+ env['CC'] = _cc
+ env['SHCC'] = _shcc
+ env['CCVERSION'] = version
+
+def exists(env):
+ path, _cc, _shcc, version = get_xlc(env)
+ if path and _cc:
+ xlc = os.path.join(path, _cc)
+ if os.path.exists(xlc):
+ return xlc
+ return None
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/aixf77.py b/tools/scons/scons-local-1.2.0/SCons/Tool/aixf77.py
new file mode 100644
index 000000000..794f7e220
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/aixf77.py
@@ -0,0 +1,74 @@
+"""engine.SCons.Tool.aixf77
+
+Tool-specific initialization for IBM Visual Age f77 Fortran compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/aixf77.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+
+#import SCons.Platform.aix
+
+import f77
+
+# It would be good to look for the AIX F77 package the same way we're now
+# looking for the C and C++ packages. This should be as easy as supplying
+# the correct package names in the following list and uncommenting the
+# SCons.Platform.aix_get_xlc() call the in the function below.
+packages = []
+
+def get_xlf77(env):
+ xlf77 = env.get('F77', 'xlf77')
+ xlf77_r = env.get('SHF77', 'xlf77_r')
+ #return SCons.Platform.aix.get_xlc(env, xlf77, xlf77_r, packages)
+ return (None, xlf77, xlf77_r, None)
+
+def generate(env):
+ """
+ Add Builders and construction variables for the Visual Age FORTRAN
+ compiler to an Environment.
+ """
+ path, _f77, _shf77, version = get_xlf77(env)
+ if path:
+ _f77 = os.path.join(path, _f77)
+ _shf77 = os.path.join(path, _shf77)
+
+ f77.generate(env)
+
+ env['F77'] = _f77
+ env['SHF77'] = _shf77
+
+def exists(env):
+ path, _f77, _shf77, version = get_xlf77(env)
+ if path and _f77:
+ xlf77 = os.path.join(path, _f77)
+ if os.path.exists(xlf77):
+ return xlf77
+ return None
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/aixlink.py b/tools/scons/scons-local-1.2.0/SCons/Tool/aixlink.py
new file mode 100644
index 000000000..3a1182a64
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/aixlink.py
@@ -0,0 +1,70 @@
+"""SCons.Tool.aixlink
+
+Tool-specific initialization for the IBM Visual Age linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/aixlink.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+
+import SCons.Util
+
+import aixcc
+import link
+
+cplusplus = __import__('c++', globals(), locals(), [])
+
+def smart_linkflags(source, target, env, for_signature):
+ if cplusplus.iscplusplus(source):
+ build_dir = env.subst('$BUILDDIR', target=target, source=source)
+ if build_dir:
+ return '-qtempinc=' + os.path.join(build_dir, 'tempinc')
+ return ''
+
+def generate(env):
+ """
+ Add Builders and construction variables for Visual Age linker to
+ an Environment.
+ """
+ link.generate(env)
+
+ env['SMARTLINKFLAGS'] = smart_linkflags
+ env['LINKFLAGS'] = SCons.Util.CLVar('$SMARTLINKFLAGS')
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -qmkshrobj -qsuppress=1501-218')
+ env['SHLIBSUFFIX'] = '.a'
+
+def exists(env):
+ path, _cc, _shcc, version = aixcc.get_xlc(env)
+ if path and _cc:
+ xlc = os.path.join(path, _cc)
+ if os.path.exists(xlc):
+ return xlc
+ return None
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/applelink.py b/tools/scons/scons-local-1.2.0/SCons/Tool/applelink.py
new file mode 100644
index 000000000..eb8df8caf
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/applelink.py
@@ -0,0 +1,65 @@
+"""SCons.Tool.applelink
+
+Tool-specific initialization for the Apple gnu-like linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/applelink.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+# Even though the Mac is based on the GNU toolchain, it doesn't understand
+# the -rpath option, so we use the "link" tool instead of "gnulink".
+import link
+
+def generate(env):
+ """Add Builders and construction variables for applelink to an
+ Environment."""
+ link.generate(env)
+
+ env['FRAMEWORKPATHPREFIX'] = '-F'
+ env['_FRAMEWORKPATH'] = '${_concat(FRAMEWORKPATHPREFIX, FRAMEWORKPATH, "", __env__)}'
+ env['_FRAMEWORKS'] = '${_concat("-framework ", FRAMEWORKS, "", __env__)}'
+ env['LINKCOM'] = env['LINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -dynamiclib')
+ env['SHLINKCOM'] = env['SHLINKCOM'] + ' $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
+
+ # override the default for loadable modules, which are different
+ # on OS X than dynamic shared libs. echoing what XCode does for
+ # pre/suffixes:
+ env['LDMODULEPREFIX'] = ''
+ env['LDMODULESUFFIX'] = ''
+ env['LDMODULEFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -bundle')
+ env['LDMODULECOM'] = '$LDMODULE -o ${TARGET} $LDMODULEFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS $_FRAMEWORKPATH $_FRAMEWORKS $FRAMEWORKSFLAGS'
+
+
+
+def exists(env):
+ return env['PLATFORM'] == 'darwin'
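
applelink extends the generic link tool with framework handling and switches
loadable modules over to '-bundle' with empty pre/suffixes. A hedged
SConstruct sketch (the source file names and the Cocoa framework are
placeholders, not anything taken from this tree):

    # SConstruct -- only meaningful on a darwin host, where applelink is selected.
    env = Environment()
    env.Append(FRAMEWORKS=['Cocoa'])            # expanded through $_FRAMEWORKS
    env.SharedLibrary('widget', ['widget.c'])   # linked with '-dynamiclib'
    env.LoadableModule('plugin', ['plugin.c'])  # linked with '-bundle', no lib prefix/suffix
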
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/ar.py b/tools/scons/scons-local-1.2.0/SCons/Tool/ar.py
new file mode 100644
index 000000000..7812fb3f2
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/ar.py
@@ -0,0 +1,57 @@
+"""SCons.Tool.ar
+
+Tool-specific initialization for ar (library archive).
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/ar.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+
+def generate(env):
+ """Add Builders and construction variables for ar to an Environment."""
+ SCons.Tool.createStaticLibBuilder(env)
+
+ env['AR'] = 'ar'
+ env['ARFLAGS'] = SCons.Util.CLVar('rc')
+ env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES'
+ env['LIBPREFIX'] = 'lib'
+ env['LIBSUFFIX'] = '.a'
+
+ if env.Detect('ranlib'):
+ env['RANLIB'] = 'ranlib'
+ env['RANLIBFLAGS'] = SCons.Util.CLVar('')
+ env['RANLIBCOM'] = '$RANLIB $RANLIBFLAGS $TARGET'
+
+def exists(env):
+ return env.Detect('ar')
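
ar.py wires the static-library builder to a plain "ar rc" invocation and only
adds a ranlib pass when ranlib can actually be detected. A short sketch,
assuming foo.c and bar.c exist in the source directory:

    # SConstruct
    env = Environment(tools=['default'])
    env.StaticLibrary('foo', ['foo.c', 'bar.c'])   # runs $ARCOM, then $RANLIBCOM if ranlib was found
    # The archiver flags live in $ARFLAGS and can be overridden, e.g.:
    #   env.Replace(ARFLAGS='rcv')
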
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/as.py b/tools/scons/scons-local-1.2.0/SCons/Tool/as.py
new file mode 100644
index 000000000..623c8d75c
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/as.py
@@ -0,0 +1,72 @@
+"""SCons.Tool.as
+
+Tool-specific initialization for as, the generic Posix assembler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/as.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+assemblers = ['as']
+
+ASSuffixes = ['.s', '.asm', '.ASM']
+ASPPSuffixes = ['.spp', '.SPP', '.sx']
+if SCons.Util.case_sensitive_suffixes('.s', '.S'):
+ ASPPSuffixes.extend(['.S'])
+else:
+ ASSuffixes.extend(['.S'])
+
+def generate(env):
+ """Add Builders and construction variables for as to an Environment."""
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ for suffix in ASSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.ASAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ASAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+ shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
+
+ for suffix in ASPPSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ASPPAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+ shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
+
+ env['AS'] = env.Detect(assemblers) or 'as'
+ env['ASFLAGS'] = SCons.Util.CLVar('')
+ env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES'
+ env['ASPPFLAGS'] = '$ASFLAGS'
+ env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
+
+def exists(env):
+ return env.Detect(assemblers)
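
as.py registers plain assembly suffixes with $ASCOM and the preprocessed
variants (.spp, plus .S on case-sensitive filesystems) with $ASPPCOM, which
runs through $CC. A sketch with hypothetical sources and an illustrative
GNU as flag:

    # SConstruct
    env = Environment(tools=['gcc', 'gnulink', 'as'])
    env.Append(ASFLAGS=['--warn'])   # reaches $ASCOM and, via $ASPPFLAGS, $ASPPCOM too
    env.Object('boot.s')             # assembled directly with $ASCOM
    env.Object('start.S')            # preprocessed first, via $ASPPCOM
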
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/bcc32.py b/tools/scons/scons-local-1.2.0/SCons/Tool/bcc32.py
new file mode 100644
index 000000000..0488ba780
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/bcc32.py
@@ -0,0 +1,76 @@
+"""SCons.Tool.bcc32
+
+Tool-specific initialization for the Borland bcc32 C/C++ compiler.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/bcc32.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+import string
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+def findIt(program, env):
+ # First search in the SCons path and then the OS path:
+ borwin = env.WhereIs(program) or SCons.Util.WhereIs(program)
+ if borwin:
+ dir = os.path.dirname(borwin)
+ env.PrependENVPath('PATH', dir)
+ return borwin
+
+def generate(env):
+    """Add Builders and construction variables for bcc to an
+    Environment."""
+    findIt('bcc32', env)
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+ for suffix in ['.c', '.cpp']:
+ static_obj.add_action(suffix, SCons.Defaults.CAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+ shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
+
+ env['CC'] = 'bcc32'
+ env['CCFLAGS'] = SCons.Util.CLVar('')
+ env['CFLAGS'] = SCons.Util.CLVar('')
+ env['CCCOM'] = '$CC -q $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES'
+ env['SHCC'] = '$CC'
+ env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
+ env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
+ env['SHCCCOM'] = '$SHCC -WD $SHCFLAGS $SHCCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o$TARGET $SOURCES'
+ env['CPPDEFPREFIX'] = '-D'
+ env['CPPDEFSUFFIX'] = ''
+ env['INCPREFIX'] = '-I'
+ env['INCSUFFIX'] = ''
+ env['SHOBJSUFFIX'] = '.dll'
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
+ env['CFILESUFFIX'] = '.cpp'
+
+def exists(env):
+ return findIt('bcc32', env)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/c++.py b/tools/scons/scons-local-1.2.0/SCons/Tool/c++.py
new file mode 100644
index 000000000..979814983
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/c++.py
@@ -0,0 +1,93 @@
+"""SCons.Tool.c++
+
+Tool-specific initialization for generic Posix C++ compilers.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/c++.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+
+import SCons.Tool
+import SCons.Defaults
+import SCons.Util
+
+compilers = ['CC', 'c++']
+
+CXXSuffixes = ['.cpp', '.cc', '.cxx', '.c++', '.C++', '.mm']
+if SCons.Util.case_sensitive_suffixes('.c', '.C'):
+ CXXSuffixes.append('.C')
+
+def iscplusplus(source):
+ if not source:
+ # Source might be None for unusual cases like SConf.
+ return 0
+ for s in source:
+ if s.sources:
+ ext = os.path.splitext(str(s.sources[0]))[1]
+ if ext in CXXSuffixes:
+ return 1
+ return 0
+
+def generate(env):
+ """
+ Add Builders and construction variables for Visual Age C++ compilers
+ to an Environment.
+ """
+ import SCons.Tool
+ import SCons.Tool.cc
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ for suffix in CXXSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.CXXAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+ shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
+
+ SCons.Tool.cc.add_common_cc_variables(env)
+
+ env['CXX'] = 'c++'
+ env['CXXFLAGS'] = SCons.Util.CLVar('')
+ env['CXXCOM'] = '$CXX -o $TARGET -c $CXXFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
+ env['SHCXX'] = '$CXX'
+ env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
+ env['SHCXXCOM'] = '$SHCXX -o $TARGET -c $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
+
+ env['CPPDEFPREFIX'] = '-D'
+ env['CPPDEFSUFFIX'] = ''
+ env['INCPREFIX'] = '-I'
+ env['INCSUFFIX'] = ''
+ env['SHOBJSUFFIX'] = '.os'
+ env['OBJSUFFIX'] = '.o'
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
+
+ env['CXXFILESUFFIX'] = '.cc'
+
+def exists(env):
+ return env.Detect(compilers)
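
c++.py decides whether a target "is C++" by looking only at the extension of
the first source file (see iscplusplus() above) and reuses the common
preprocessor flags set up by cc.py. A minimal sketch with placeholder
sources:

    # SConstruct
    env = Environment(tools=['default'])
    env.Append(CXXFLAGS=['-O2'], CPPDEFINES=['NDEBUG'], CPPPATH=['include'])
    env.Program('app', ['main.cpp', 'util.cc'])   # each source compiled via $CXXCOM
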
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/cc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/cc.py
new file mode 100644
index 000000000..ef1249d4c
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/cc.py
@@ -0,0 +1,108 @@
+"""SCons.Tool.cc
+
+Tool-specific initialization for generic Posix C compilers.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/cc.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Tool
+import SCons.Defaults
+import SCons.Util
+
+CSuffixes = ['.c', '.m']
+if not SCons.Util.case_sensitive_suffixes('.c', '.C'):
+ CSuffixes.append('.C')
+
+def add_common_cc_variables(env):
+ """
+ Add underlying common "C compiler" variables that
+ are used by multiple tools (specifically, c++).
+ """
+ if not env.has_key('_CCCOMCOM'):
+ env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS'
+ # It's a hack to test for darwin here, but the alternative
+ # of creating an applecc.py to contain this seems overkill.
+ # Maybe someday the Apple platform will require more setup and
+ # this logic will be moved.
+ env['FRAMEWORKS'] = SCons.Util.CLVar('')
+ env['FRAMEWORKPATH'] = SCons.Util.CLVar('')
+ if env['PLATFORM'] == 'darwin':
+ env['_CCCOMCOM'] = env['_CCCOMCOM'] + ' $_FRAMEWORKPATH'
+
+ if not env.has_key('CCFLAGS'):
+ env['CCFLAGS'] = SCons.Util.CLVar('')
+
+ if not env.has_key('SHCCFLAGS'):
+ env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
+
+def generate(env):
+ """
+ Add Builders and construction variables for C compilers to an Environment.
+ """
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ for suffix in CSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.CAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+ shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
+
+ add_common_cc_variables(env)
+
+ env['CC'] = 'cc'
+ env['CFLAGS'] = SCons.Util.CLVar('')
+ env['CCCOM'] = '$CC -o $TARGET -c $CFLAGS $CCFLAGS $_CCCOMCOM $SOURCES'
+ env['SHCC'] = '$CC'
+ env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
+ env['SHCCCOM'] = '$SHCC -o $TARGET -c $SHCFLAGS $SHCCFLAGS $_CCCOMCOM $SOURCES'
+
+ env['CPPDEFPREFIX'] = '-D'
+ env['CPPDEFSUFFIX'] = ''
+ env['INCPREFIX'] = '-I'
+ env['INCSUFFIX'] = ''
+ env['SHOBJSUFFIX'] = '.os'
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 0
+
+ env['CFILESUFFIX'] = '.c'
+
+def exists(env):
+ return env.Detect('cc')
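
add_common_cc_variables() exists so that cc.py and c++.py define $_CCCOMCOM
(preprocessor defines, include paths, and framework paths on darwin) exactly
once, whichever tool happens to load first. A sketch with a placeholder
source file:

    # SConstruct
    env = Environment(tools=['default'])
    env.Append(CCFLAGS=['-g'], CPPDEFINES=['VERSION=1'], CPPPATH=['.'])
    env.Object('hello.c')   # $CC -o hello.o -c $CFLAGS $CCFLAGS $_CCCOMCOM hello.c
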
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/cvf.py b/tools/scons/scons-local-1.2.0/SCons/Tool/cvf.py
new file mode 100644
index 000000000..203d9e414
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/cvf.py
@@ -0,0 +1,52 @@
+"""engine.SCons.Tool.cvf
+
+Tool-specific initialization for the Compaq Visual Fortran compiler.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/cvf.py 3842 2008/12/20 22:59:52 scons"
+
+import fortran
+
+compilers = ['f90']
+
+def generate(env):
+ """Add Builders and construction variables for compaq visual fortran to an Environment."""
+
+ fortran.generate(env)
+
+ env['FORTRAN'] = 'f90'
+ env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
+ env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
+ env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
+ env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANMODFLAG $_FORTRANINCFLAGS /compile_only ${SOURCES.windows} /object:${TARGET.windows}'
+ env['OBJSUFFIX'] = '.obj'
+ env['FORTRANMODDIR'] = '${TARGET.dir}'
+ env['FORTRANMODDIRPREFIX'] = '/module:'
+ env['FORTRANMODDIRSUFFIX'] = ''
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/default.py b/tools/scons/scons-local-1.2.0/SCons/Tool/default.py
new file mode 100644
index 000000000..a105f7f0c
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/default.py
@@ -0,0 +1,44 @@
+"""SCons.Tool.default
+
+Initialization with a default tool list.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/default.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Tool
+
+def generate(env):
+ """Add default tools."""
+ for t in SCons.Tool.tool_list(env['PLATFORM'], env):
+ SCons.Tool.Tool(t)(env)
+
+def exists(env):
+ return 1
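
default.py is the tool that runs when an Environment is created without an
explicit tools list: it asks tool_list() for the platform's selection and
applies each tool in turn, so the two environments below end up configured
identically (sketch):

    # SConstruct
    env1 = Environment()                    # implicitly tools=['default']
    env2 = Environment(tools=['default'])   # the explicit spelling of the same thing
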
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/dmd.py b/tools/scons/scons-local-1.2.0/SCons/Tool/dmd.py
new file mode 100644
index 000000000..88bff8abd
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/dmd.py
@@ -0,0 +1,218 @@
+"""SCons.Tool.dmd
+
+Tool-specific initialization for the Digital Mars D compiler.
+(http://digitalmars.com/d)
+
+Coded by Andy Friesen (andy@ikagames.com)
+15 November 2003
+
+There are a number of problems with this script at this point in time.
+The one that irritates me the most is the Windows linker setup. The D
+linker doesn't have a way to add lib paths on the commandline, as far
+as I can see. You have to specify paths relative to the SConscript or
+use absolute paths. To hack around it, add '#/blah'. This will link
+blah.lib from the directory where SConstruct resides.
+
+Compiler variables:
+ DC - The name of the D compiler to use. Defaults to dmd or gdmd,
+ whichever is found.
+ DPATH - List of paths to search for import modules.
+ DVERSIONS - List of version tags to enable when compiling.
+ DDEBUG - List of debug tags to enable when compiling.
+
+Linker related variables:
+ LIBS - List of library files to link in.
+ DLINK - Name of the linker to use. Defaults to dmd or gdmd.
+ DLINKFLAGS - List of linker flags.
+
+Lib tool variables:
+ DLIB - Name of the lib tool to use. Defaults to lib.
+ DLIBFLAGS - List of flags to pass to the lib tool.
+ LIBS - Same as for the linker. (libraries to pull into the .lib)
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/dmd.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import string
+
+import SCons.Action
+import SCons.Builder
+import SCons.Defaults
+import SCons.Scanner.D
+import SCons.Tool
+
+# Adapted from c++.py
+def isD(source):
+ if not source:
+ return 0
+
+ for s in source:
+ if s.sources:
+ ext = os.path.splitext(str(s.sources[0]))[1]
+ if ext == '.d':
+ return 1
+ return 0
+
+smart_link = {}
+
+smart_lib = {}
+
+def generate(env):
+ global smart_link
+ global smart_lib
+
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ DAction = SCons.Action.Action('$DCOM', '$DCOMSTR')
+
+ static_obj.add_action('.d', DAction)
+ shared_obj.add_action('.d', DAction)
+ static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter)
+ shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter)
+
+ dc = env.Detect(['dmd', 'gdmd'])
+ env['DC'] = dc
+ env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -of$TARGET $SOURCES'
+ env['_DINCFLAGS'] = '$( ${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
+ env['_DVERFLAGS'] = '$( ${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)} $)'
+ env['_DDEBUGFLAGS'] = '$( ${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)} $)'
+ env['_DFLAGS'] = '$( ${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)} $)'
+
+ env['DPATH'] = ['#/']
+ env['DFLAGS'] = []
+ env['DVERSIONS'] = []
+ env['DDEBUG'] = []
+
+ if dc:
+ # Add the path to the standard library.
+ # This is merely for the convenience of the dependency scanner.
+ dmd_path = env.WhereIs(dc)
+ if dmd_path:
+ x = string.rindex(dmd_path, dc)
+ phobosDir = dmd_path[:x] + '/../src/phobos'
+ if os.path.isdir(phobosDir):
+ env.Append(DPATH = [phobosDir])
+
+ env['DINCPREFIX'] = '-I'
+ env['DINCSUFFIX'] = ''
+ env['DVERPREFIX'] = '-version='
+ env['DVERSUFFIX'] = ''
+ env['DDEBUGPREFIX'] = '-debug='
+ env['DDEBUGSUFFIX'] = ''
+ env['DFLAGPREFIX'] = '-'
+ env['DFLAGSUFFIX'] = ''
+ env['DFILESUFFIX'] = '.d'
+
+ # Need to use the Digital Mars linker/lib on windows.
+ # *nix can just use GNU link.
+ if env['PLATFORM'] == 'win32':
+ env['DLINK'] = '$DC'
+ env['DLINKCOM'] = '$DLINK -of$TARGET $SOURCES $DFLAGS $DLINKFLAGS $_DLINKLIBFLAGS'
+ env['DLIB'] = 'lib'
+ env['DLIBCOM'] = '$DLIB $_DLIBFLAGS -c $TARGET $SOURCES $_DLINKLIBFLAGS'
+
+ env['_DLINKLIBFLAGS'] = '$( ${_concat(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
+ env['_DLIBFLAGS'] = '$( ${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)} $)'
+ env['DLINKFLAGS'] = []
+ env['DLIBLINKPREFIX'] = ''
+ env['DLIBLINKSUFFIX'] = '.lib'
+ env['DLIBFLAGPREFIX'] = '-'
+ env['DLIBFLAGSUFFIX'] = ''
+ env['DLINKFLAGPREFIX'] = '-'
+ env['DLINKFLAGSUFFIX'] = ''
+
+ SCons.Tool.createStaticLibBuilder(env)
+
+ # Basically, we hijack the link and ar builders with our own.
+ # these builders check for the presence of D source, and swap out
+ # the system's defaults for the Digital Mars tools. If there's no D
+ # source, then we silently return the previous settings.
+ linkcom = env.get('LINKCOM')
+ try:
+ env['SMART_LINKCOM'] = smart_link[linkcom]
+ except KeyError:
+ def _smartLink(source, target, env, for_signature,
+ defaultLinker=linkcom):
+ if isD(source):
+ # XXX I'm not sure how to add a $DLINKCOMSTR variable
+ # so that it works with this _smartLink() logic,
+ # and I don't have a D compiler/linker to try it out,
+ # so we'll leave it alone for now.
+ return '$DLINKCOM'
+ else:
+ return defaultLinker
+ env['SMART_LINKCOM'] = smart_link[linkcom] = _smartLink
+
+ arcom = env.get('ARCOM')
+ try:
+ env['SMART_ARCOM'] = smart_lib[arcom]
+ except KeyError:
+ def _smartLib(source, target, env, for_signature,
+ defaultLib=arcom):
+ if isD(source):
+ # XXX I'm not sure how to add a $DLIBCOMSTR variable
+ # so that it works with this _smartLib() logic, and
+ # I don't have a D compiler/archiver to try it out,
+ # so we'll leave it alone for now.
+ return '$DLIBCOM'
+ else:
+ return defaultLib
+ env['SMART_ARCOM'] = smart_lib[arcom] = _smartLib
+
+ # It is worth noting that the final space in these strings is
+ # absolutely pivotal. SCons sees these as actions and not generators
+ # if it is not there. (very bad)
+ env['ARCOM'] = '$SMART_ARCOM '
+ env['LINKCOM'] = '$SMART_LINKCOM '
+ else: # assuming linux
+ linkcom = env.get('LINKCOM')
+ try:
+ env['SMART_LINKCOM'] = smart_link[linkcom]
+ except KeyError:
+ def _smartLink(source, target, env, for_signature,
+ defaultLinker=linkcom, dc=dc):
+ if isD(source):
+ try:
+ libs = env['LIBS']
+ except KeyError:
+ libs = []
+ if 'phobos' not in libs:
+                        if dc == 'dmd':
+                            env.Append(LIBS = ['phobos'])
+                        elif dc == 'gdmd':
+                            env.Append(LIBS = ['gphobos'])
+ if 'pthread' not in libs:
+ env.Append(LIBS = ['pthread'])
+ if 'm' not in libs:
+ env.Append(LIBS = ['m'])
+ return defaultLinker
+ env['SMART_LINKCOM'] = smart_link[linkcom] = _smartLink
+
+ env['LINKCOM'] = '$SMART_LINKCOM '
+
+def exists(env):
+ return env.Detect(['dmd', 'gdmd'])
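
The docstring at the top of dmd.py lists the construction variables the tool
reads; generate() then hijacks $LINKCOM and $ARCOM so D objects are linked
with dmd or gdmd (appending phobos, pthread and m on non-Windows platforms).
A hedged sketch of setting those variables -- the file names, import path and
version tag are invented:

    # SConstruct -- assumes dmd or gdmd is on the PATH.
    env = Environment(tools=['default', 'dmd'])
    env.Append(DPATH=['#/imports'],     # searched via $_DINCFLAGS
               DVERSIONS=['Unicode'],   # emitted as -version=Unicode
               DDEBUG=['logging'])      # emitted as -debug=logging
    env.Program('hello', ['hello.d'])
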
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/dvi.py b/tools/scons/scons-local-1.2.0/SCons/Tool/dvi.py
new file mode 100644
index 000000000..af65671ea
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/dvi.py
@@ -0,0 +1,58 @@
+"""SCons.Tool.dvi
+
+Common DVI Builder definition for various other Tool modules that use it.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/dvi.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Builder
+import SCons.Tool
+
+DVIBuilder = None
+
+def generate(env):
+ try:
+ env['BUILDERS']['DVI']
+ except KeyError:
+ global DVIBuilder
+
+ if DVIBuilder is None:
+ # The suffix is hard-coded to '.dvi', not configurable via a
+ # construction variable like $DVISUFFIX, because the output
+ # file name is hard-coded within TeX.
+ DVIBuilder = SCons.Builder.Builder(action = {},
+ source_scanner = SCons.Tool.LaTeXScanner,
+ suffix = '.dvi',
+ emitter = {},
+ source_ext_match = None)
+
+ env['BUILDERS']['DVI'] = DVIBuilder
+
+def exists(env):
+ # This only puts a skeleton Builder in place, so if someone
+ # references this Tool directly, it's always "available."
+ return 1
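
dvi.py deliberately installs an empty DVI builder; the tex and latex tools
fill in the actual actions and emitters later. A sketch, assuming report.tex
exists and a TeX installation is available:

    # SConstruct
    env = Environment(tools=['default'])
    env.DVI('report.tex')   # action and emitter are supplied by the tex/latex tools
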
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/dvipdf.py b/tools/scons/scons-local-1.2.0/SCons/Tool/dvipdf.py
new file mode 100644
index 000000000..821d125e6
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/dvipdf.py
@@ -0,0 +1,119 @@
+"""SCons.Tool.dvipdf
+
+Tool-specific initialization for dvipdf.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/dvipdf.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Defaults
+import SCons.Scanner.LaTeX
+import SCons.Tool.pdf
+import SCons.Tool.tex
+import SCons.Util
+
+_null = SCons.Scanner.LaTeX._null
+
+def DviPdfPsFunction(XXXDviAction, target = None, source= None, env=None):
+ """A builder for DVI files that sets the TEXPICTS environment
+ variable before running dvi2ps or dvipdf."""
+
+ try:
+ abspath = source[0].attributes.path
+ except AttributeError :
+ abspath = ''
+
+ saved_env = SCons.Scanner.LaTeX.modify_env_var(env, 'TEXPICTS', abspath)
+
+ result = XXXDviAction(target, source, env)
+
+ if saved_env is _null:
+ try:
+ del env['ENV']['TEXPICTS']
+ except KeyError:
+ pass # was never set
+ else:
+ env['ENV']['TEXPICTS'] = saved_env
+
+ return result
+
+def DviPdfFunction(target = None, source= None, env=None):
+ result = DviPdfPsFunction(PDFAction,target,source,env)
+ return result
+
+def DviPdfStrFunction(target = None, source= None, env=None):
+ """A strfunction for dvipdf that returns the appropriate
+ command string for the no_exec options."""
+ if env.GetOption("no_exec"):
+ result = env.subst('$DVIPDFCOM',0,target,source)
+ else:
+ result = ''
+ return result
+
+PDFAction = None
+DVIPDFAction = None
+
+def PDFEmitter(target, source, env):
+ """Strips any .aux or .log files from the input source list.
+ These are created by the TeX Builder that in all likelihood was
+ used to generate the .dvi file we're using as input, and we only
+ care about the .dvi file.
+ """
+ def strip_suffixes(n):
+ return not SCons.Util.splitext(str(n))[1] in ['.aux', '.log']
+ source = filter(strip_suffixes, source)
+ return (target, source)
+
+def generate(env):
+ """Add Builders and construction variables for dvipdf to an Environment."""
+ global PDFAction
+ if PDFAction is None:
+ PDFAction = SCons.Action.Action('$DVIPDFCOM', '$DVIPDFCOMSTR')
+
+ global DVIPDFAction
+ if DVIPDFAction is None:
+ DVIPDFAction = SCons.Action.Action(DviPdfFunction, strfunction = DviPdfStrFunction)
+
+ import pdf
+ pdf.generate(env)
+
+ bld = env['BUILDERS']['PDF']
+ bld.add_action('.dvi', DVIPDFAction)
+ bld.add_emitter('.dvi', PDFEmitter)
+
+ env['DVIPDF'] = 'dvipdf'
+ env['DVIPDFFLAGS'] = SCons.Util.CLVar('')
+ env['DVIPDFCOM'] = 'cd ${TARGET.dir} && $DVIPDF $DVIPDFFLAGS ${SOURCE.file} ${TARGET.file}'
+
+ # Deprecated synonym.
+ env['PDFCOM'] = ['$DVIPDFCOM']
+
+def exists(env):
+ return env.Detect('dvipdf')
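
DviPdfPsFunction() above temporarily points $TEXPICTS at the source's
directory so that figures included by the original .tex file can be found,
then restores (or removes) the previous value. Driving the extended PDF
builder looks like this (sketch; thesis.tex is a placeholder):

    # SConstruct -- needs dvipdf from a TeX installation on the PATH.
    env = Environment(tools=['default'])
    dvi = env.DVI('thesis.tex')
    env.PDF(dvi)            # .dvi -> .pdf via $DVIPDFCOM, run inside ${TARGET.dir}
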
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/dvips.py b/tools/scons/scons-local-1.2.0/SCons/Tool/dvips.py
new file mode 100644
index 000000000..db763f1d0
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/dvips.py
@@ -0,0 +1,88 @@
+"""SCons.Tool.dvips
+
+Tool-specific initialization for dvips.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/dvips.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Builder
+import SCons.Tool.dvipdf
+import SCons.Util
+
+def DviPsFunction(target = None, source= None, env=None):
+ result = SCons.Tool.dvipdf.DviPdfPsFunction(PSAction,target,source,env)
+ return result
+
+def DviPsStrFunction(target = None, source= None, env=None):
+ """A strfunction for dvipdf that returns the appropriate
+ command string for the no_exec options."""
+ if env.GetOption("no_exec"):
+ result = env.subst('$PSCOM',0,target,source)
+ else:
+ result = ''
+ return result
+
+PSAction = None
+DVIPSAction = None
+PSBuilder = None
+
+def generate(env):
+ """Add Builders and construction variables for dvips to an Environment."""
+ global PSAction
+ if PSAction is None:
+ PSAction = SCons.Action.Action('$PSCOM', '$PSCOMSTR')
+
+ global DVIPSAction
+ if DVIPSAction is None:
+ DVIPSAction = SCons.Action.Action(DviPsFunction, strfunction = DviPsStrFunction)
+
+ global PSBuilder
+ if PSBuilder is None:
+ PSBuilder = SCons.Builder.Builder(action = PSAction,
+ prefix = '$PSPREFIX',
+ suffix = '$PSSUFFIX',
+ src_suffix = '.dvi',
+ src_builder = 'DVI',
+ single_source=True)
+
+ env['BUILDERS']['PostScript'] = PSBuilder
+
+ env['DVIPS'] = 'dvips'
+ env['DVIPSFLAGS'] = SCons.Util.CLVar('')
+ # I'm not quite sure I got the directories and filenames right for variant_dir
+ # We need to be in the correct directory for the sake of latex \includegraphics eps included files.
+ env['PSCOM'] = 'cd ${TARGET.dir} && $DVIPS $DVIPSFLAGS -o ${TARGET.file} ${SOURCE.file}'
+ env['PSPREFIX'] = ''
+ env['PSSUFFIX'] = '.ps'
+
+def exists(env):
+ return env.Detect('dvips')
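
dvips.py adds a separate 'PostScript' builder on top of the same DVI chain
(sketch; paper.dvi is a placeholder):

    # SConstruct
    env = Environment(tools=['default'])
    env.PostScript('paper.ps', 'paper.dvi')   # cd ${TARGET.dir} && dvips -o paper.ps paper.dvi
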
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/f77.py b/tools/scons/scons-local-1.2.0/SCons/Tool/f77.py
new file mode 100644
index 000000000..21ab6d82d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/f77.py
@@ -0,0 +1,56 @@
+"""engine.SCons.Tool.f77
+
+Tool-specific initialization for the generic Posix f77 Fortran compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/f77.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Scanner.Fortran
+import SCons.Tool
+import SCons.Util
+from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env
+
+compilers = ['f77']
+
+def generate(env):
+ add_all_to_env(env)
+ add_f77_to_env(env)
+
+ fcomp = env.Detect(compilers) or 'f77'
+ env['F77'] = fcomp
+ env['SHF77'] = fcomp
+
+ env['FORTRAN'] = fcomp
+ env['SHFORTRAN'] = fcomp
+
+def exists(env):
+ return env.Detect(compilers)
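
f77.py (and the f90.py/f95.py siblings that follow) only resolve which
compiler name to use for their dialect; the suffix handling and command lines
come from SCons.Tool.FortranCommon. A sketch with a placeholder source:

    # SConstruct -- assumes an f77 compiler is on the PATH.
    env = Environment(tools=['default', 'f77'])
    env.Append(FORTRANFLAGS=['-O2'])      # .f sources go through the generic $FORTRANCOM
    env.Program('solver', ['solver.f'])
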
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/f90.py b/tools/scons/scons-local-1.2.0/SCons/Tool/f90.py
new file mode 100644
index 000000000..1078d2cca
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/f90.py
@@ -0,0 +1,56 @@
+"""engine.SCons.Tool.f90
+
+Tool-specific initialization for the generic Posix f90 Fortran compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/f90.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Scanner.Fortran
+import SCons.Tool
+import SCons.Util
+from SCons.Tool.FortranCommon import add_all_to_env, add_f90_to_env
+
+compilers = ['f90']
+
+def generate(env):
+ add_all_to_env(env)
+ add_f90_to_env(env)
+
+ fc = env.Detect(compilers) or 'f90'
+ env['F90'] = fc
+ env['SHF90'] = fc
+
+ env['FORTRAN'] = fc
+ env['SHFORTRAN'] = fc
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/f95.py b/tools/scons/scons-local-1.2.0/SCons/Tool/f95.py
new file mode 100644
index 000000000..012930ca7
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/f95.py
@@ -0,0 +1,57 @@
+"""engine.SCons.Tool.f95
+
+Tool-specific initialization for the generic Posix f95 Fortran compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/f95.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+import fortran
+from SCons.Tool.FortranCommon import add_all_to_env, add_f95_to_env
+
+compilers = ['f95']
+
+def generate(env):
+ add_all_to_env(env)
+ add_f95_to_env(env)
+
+ fcomp = env.Detect(compilers) or 'f95'
+ env['F95'] = fcomp
+ env['SHF95'] = fcomp
+
+ env['FORTRAN'] = fcomp
+ env['SHFORTRAN'] = fcomp
+
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/filesystem.py b/tools/scons/scons-local-1.2.0/SCons/Tool/filesystem.py
new file mode 100644
index 000000000..dbab56202
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/filesystem.py
@@ -0,0 +1,92 @@
+"""SCons.Tool.filesystem
+
+Tool-specific initialization for the filesystem tools.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/filesystem.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons
+from SCons.Tool.install import copyFunc
+
+copyToBuilder, copyAsBuilder = None, None
+
+def copyto_emitter(target, source, env):
+ """ changes the path of the source to be under the target (which
+ are assumed to be directories.
+ """
+ n_target = []
+
+ for t in target:
+ n_target = n_target + map( lambda s, t=t: t.File( str( s ) ), source )
+
+ return (n_target, source)
+
+def copy_action_func(target, source, env):
+ assert( len(target) == len(source) ), "\ntarget: %s\nsource: %s" %(map(str, target),map(str, source))
+
+ for t, s in zip(target, source):
+ if copyFunc(t.get_path(), s.get_path(), env):
+ return 1
+
+ return 0
+
+def copy_action_str(target, source, env):
+ return env.subst_target_source(env['COPYSTR'], 0, target, source)
+
+copy_action = SCons.Action.Action( copy_action_func, copy_action_str )
+
+def generate(env):
+ try:
+ env['BUILDERS']['CopyTo']
+ env['BUILDERS']['CopyAs']
+ except KeyError, e:
+ global copyToBuilder
+ if copyToBuilder is None:
+ copyToBuilder = SCons.Builder.Builder(
+ action = copy_action,
+ target_factory = env.fs.Dir,
+ source_factory = env.fs.Entry,
+ multi = 1,
+ emitter = [ copyto_emitter, ] )
+
+ global copyAsBuilder
+ if copyAsBuilder is None:
+ copyAsBuilder = SCons.Builder.Builder(
+ action = copy_action,
+ target_factory = env.fs.Entry,
+ source_factory = env.fs.Entry )
+
+ env['BUILDERS']['CopyTo'] = copyToBuilder
+ env['BUILDERS']['CopyAs'] = copyAsBuilder
+
+ env['COPYSTR'] = 'Copy file(s): "$SOURCES" to "$TARGETS"'
+
+def exists(env):
+ return 1
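
The filesystem tool registers two builders: CopyTo, whose copyto_emitter reroots every source file underneath a target directory, and CopyAs, which performs a plain one-to-one copy via the same copy_action. A hedged usage sketch (all file and directory names are invented for illustration):

    # SConscript fragment -- sketch only
    env = Environment(tools=['filesystem'])
    # CopyTo: the target is a directory; the emitter turns it into one
    # target file per source, so both configs end up inside build/etc/.
    env.CopyTo('build/etc', ['app.conf', 'logging.conf'])
    # CopyAs: copy a single entry under a new name.
    env.CopyAs('build/etc/app.default.conf', 'app.conf')
    # Both builders report progress via the COPYSTR template set in generate().
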
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/fortran.py b/tools/scons/scons-local-1.2.0/SCons/Tool/fortran.py
new file mode 100644
index 000000000..aa53cf61b
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/fortran.py
@@ -0,0 +1,57 @@
+"""SCons.Tool.fortran
+
+Tool-specific initialization for a generic Posix f77/f90 Fortran compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/fortran.py 3842 2008/12/20 22:59:52 scons"
+
+import re
+import string
+
+import SCons.Action
+import SCons.Defaults
+import SCons.Scanner.Fortran
+import SCons.Tool
+import SCons.Util
+from SCons.Tool.FortranCommon import add_all_to_env, add_fortran_to_env
+
+compilers = ['f95', 'f90', 'f77']
+
+def generate(env):
+ add_all_to_env(env)
+ add_fortran_to_env(env)
+
+ fc = env.Detect(compilers) or 'f77'
+ env['SHFORTRAN'] = fc
+ env['FORTRAN'] = fc
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/g++.py b/tools/scons/scons-local-1.2.0/SCons/Tool/g++.py
new file mode 100644
index 000000000..feb39519e
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/g++.py
@@ -0,0 +1,84 @@
+"""SCons.Tool.g++
+
+Tool-specific initialization for g++.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/g++.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import re
+import subprocess
+
+import SCons.Tool
+import SCons.Util
+
+cplusplus = __import__('c++', globals(), locals(), [])
+
+compilers = ['g++']
+
+def generate(env):
+ """Add Builders and construction variables for g++ to an Environment."""
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ cplusplus.generate(env)
+
+ env['CXX'] = env.Detect(compilers)
+
+ # platform specific settings
+ if env['PLATFORM'] == 'aix':
+ env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc')
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
+ env['SHOBJSUFFIX'] = '$OBJSUFFIX'
+ elif env['PLATFORM'] == 'hpux':
+ env['SHOBJSUFFIX'] = '.pic.o'
+ elif env['PLATFORM'] == 'sunos':
+ env['SHOBJSUFFIX'] = '.pic.o'
+ # determine compiler version
+ if env['CXX']:
+ #pipe = SCons.Action._subproc(env, [env['CXX'], '-dumpversion'],
+ pipe = SCons.Action._subproc(env, [env['CXX'], '--version'],
+ stdin = 'devnull',
+ stderr = 'devnull',
+ stdout = subprocess.PIPE)
+ if pipe.wait() != 0: return
+ # -dumpversion was added in GCC 3.0. As long as we're supporting
+ # GCC versions older than that, we should use --version and a
+ # regular expression.
+ #line = pipe.stdout.read().strip()
+ #if line:
+ # env['CXXVERSION'] = line
+ line = pipe.stdout.readline()
+ match = re.search(r'[0-9]+(\.[0-9]+)+', line)
+ if match:
+ env['CXXVERSION'] = match.group(0)
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/g77.py b/tools/scons/scons-local-1.2.0/SCons/Tool/g77.py
new file mode 100644
index 000000000..effc9fcfc
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/g77.py
@@ -0,0 +1,67 @@
+"""engine.SCons.Tool.g77
+
+Tool-specific initialization for g77.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/g77.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+from SCons.Tool.FortranCommon import add_all_to_env, add_f77_to_env
+
+compilers = ['g77', 'f77']
+
+def generate(env):
+ """Add Builders and construction variables for g77 to an Environment."""
+ add_all_to_env(env)
+ add_f77_to_env(env)
+
+ fcomp = env.Detect(compilers) or 'g77'
+ if env['PLATFORM'] in ['cygwin', 'win32']:
+ env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS')
+ env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS')
+ else:
+ env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -fPIC')
+ env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -fPIC')
+
+ env['FORTRAN'] = fcomp
+ env['SHFORTRAN'] = '$FORTRAN'
+
+ env['F77'] = fcomp
+ env['SHF77'] = '$F77'
+
+ env['INCFORTRANPREFIX'] = "-I"
+ env['INCFORTRANSUFFIX'] = ""
+
+ env['INCF77PREFIX'] = "-I"
+ env['INCF77SUFFIX'] = ""
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/gas.py b/tools/scons/scons-local-1.2.0/SCons/Tool/gas.py
new file mode 100644
index 000000000..5595e9e13
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/gas.py
@@ -0,0 +1,47 @@
+"""SCons.Tool.gas
+
+Tool-specific initialization for as, the Gnu assembler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/gas.py 3842 2008/12/20 22:59:52 scons"
+
+as_module = __import__('as', globals(), locals(), [])
+
+assemblers = ['as', 'gas']
+
+def generate(env):
+ """Add Builders and construction variables for as to an Environment."""
+ as_module.generate(env)
+
+ env['AS'] = env.Detect(assemblers) or 'as'
+
+def exists(env):
+ return env.Detect(assemblers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/gcc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/gcc.py
new file mode 100644
index 000000000..db07575b2
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/gcc.py
@@ -0,0 +1,74 @@
+"""SCons.Tool.gcc
+
+Tool-specific initialization for gcc.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/gcc.py 3842 2008/12/20 22:59:52 scons"
+
+import cc
+import os
+import re
+import subprocess
+
+import SCons.Util
+
+compilers = ['gcc', 'cc']
+
+def generate(env):
+ """Add Builders and construction variables for gcc to an Environment."""
+ cc.generate(env)
+
+ env['CC'] = env.Detect(compilers) or 'gcc'
+ if env['PLATFORM'] in ['cygwin', 'win32']:
+ env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
+ else:
+ env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')
+ # determine compiler version
+ if env['CC']:
+ #pipe = SCons.Action._subproc(env, [env['CC'], '-dumpversion'],
+ pipe = SCons.Action._subproc(env, [env['CC'], '--version'],
+ stdin = 'devnull',
+ stderr = 'devnull',
+ stdout = subprocess.PIPE)
+ if pipe.wait() != 0: return
+ # -dumpversion was added in GCC 3.0. As long as we're supporting
+ # GCC versions older than that, we should use --version and a
+ # regular expression.
+ #line = pipe.stdout.read().strip()
+ #if line:
+ # env['CCVERSION'] = line
+ line = pipe.stdout.readline()
+ match = re.search(r'[0-9]+(\.[0-9]+)+', line)
+ if match:
+ env['CCVERSION'] = match.group(0)
+
+def exists(env):
+ return env.Detect(compilers)
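
Both gcc.py here and g++.py earlier detect the compiler version by running '$CC --version' through SCons.Action._subproc and scraping the first output line, since -dumpversion only appeared in GCC 3.0. A standalone sketch of that scraping step, using an invented banner line rather than real captured output:

    import re

    line = 'gcc (GCC) 4.2.1 20070719'                 # hypothetical first line of `gcc --version`
    match = re.search(r'[0-9]+(\.[0-9]+)+', line)     # same pattern used in gcc.py / g++.py
    if match:
        print match.group(0)                          # -> '4.2.1', what generate() stores in env['CCVERSION']
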
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/gfortran.py b/tools/scons/scons-local-1.2.0/SCons/Tool/gfortran.py
new file mode 100644
index 000000000..7da19e4fd
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/gfortran.py
@@ -0,0 +1,58 @@
+"""SCons.Tool.gfortran
+
+Tool-specific initialization for gfortran, the GNU Fortran 95/Fortran
+2003 compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/gfortran.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+import fortran
+
+def generate(env):
+ """Add Builders and construction variables for gfortran to an
+ Environment."""
+ fortran.generate(env)
+
+ for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
+ env['%s' % dialect] = 'gfortran'
+ env['SH%s' % dialect] = '$%s' % dialect
+ if env['PLATFORM'] in ['cygwin', 'win32']:
+ env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS' % dialect)
+ else:
+ env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)
+
+ env['INC%sPREFIX' % dialect] = "-I"
+ env['INC%sSUFFIX' % dialect] = ""
+
+def exists(env):
+ return env.Detect('gfortran')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/gnulink.py b/tools/scons/scons-local-1.2.0/SCons/Tool/gnulink.py
new file mode 100644
index 000000000..de95ee1bb
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/gnulink.py
@@ -0,0 +1,57 @@
+"""SCons.Tool.gnulink
+
+Tool-specific initialization for the gnu linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/gnulink.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+import link
+
+linkers = ['g++', 'gcc']
+
+def generate(env):
+ """Add Builders and construction variables for gnulink to an Environment."""
+ link.generate(env)
+
+ if env['PLATFORM'] == 'hpux':
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared -fPIC')
+
+ # __RPATH is set to $_RPATH in the platform specification if that
+ # platform supports it.
+ env.Append(LINKFLAGS=['$__RPATH'])
+ env['RPATHPREFIX'] = '-Wl,-rpath='
+ env['RPATHSUFFIX'] = ''
+ env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
+
+def exists(env):
+ return env.Detect(linkers)
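
gnulink.py appends $__RPATH to LINKFLAGS and defines _RPATH as the concatenation of RPATHPREFIX, each RPATH entry and RPATHSUFFIX, so on platforms whose specification maps __RPATH to $_RPATH (as the comment above notes) a populated RPATH list turns into -Wl,-rpath= options. A small sketch under that assumption, with a hypothetical library path:

    # SConstruct fragment -- sketch only
    env = Environment(tools=['default', 'gnulink'])
    env.Append(RPATH=['/opt/mylibs/lib'])             # hypothetical runtime search path
    # Through $_RPATH this contributes roughly: -Wl,-rpath=/opt/mylibs/lib
    env.Program('app', 'app.c', LIBPATH=['/opt/mylibs/lib'], LIBS=['mylib'])
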
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/gs.py b/tools/scons/scons-local-1.2.0/SCons/Tool/gs.py
new file mode 100644
index 000000000..c52440af6
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/gs.py
@@ -0,0 +1,75 @@
+"""SCons.Tool.gs
+
+Tool-specific initialization for Ghostscript.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/gs.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Platform
+import SCons.Util
+
+# Ghostscript goes by different names on different platforms...
+platform = SCons.Platform.platform_default()
+
+if platform == 'os2':
+ gs = 'gsos2'
+elif platform == 'win32':
+ gs = 'gswin32c'
+else:
+ gs = 'gs'
+
+GhostscriptAction = None
+
+def generate(env):
+ """Add Builders and construction variables for Ghostscript to an
+ Environment."""
+
+ global GhostscriptAction
+ if GhostscriptAction is None:
+ GhostscriptAction = SCons.Action.Action('$GSCOM', '$GSCOMSTR')
+
+ import pdf
+ pdf.generate(env)
+
+ bld = env['BUILDERS']['PDF']
+ bld.add_action('.ps', GhostscriptAction)
+
+ env['GS'] = gs
+ env['GSFLAGS'] = SCons.Util.CLVar('-dNOPAUSE -dBATCH -sDEVICE=pdfwrite')
+ env['GSCOM'] = '$GS $GSFLAGS -sOutputFile=$TARGET $SOURCES'
+
+
+def exists(env):
+ if env.has_key('PS2PDF'):
+ return env.Detect(env['PS2PDF'])
+ else:
+ return env.Detect(gs) or SCons.Util.WhereIs(gs)
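
The gs tool hooks a Ghostscript action for '.ps' sources into the already-registered PDF builder and supplies the GS/GSFLAGS/GSCOM defaults shown above. Driving it from an SConstruct might look like this sketch (doc.ps is a placeholder input):

    # SConstruct fragment -- sketch only
    env = Environment(tools=['default', 'gs'])
    # With the '.ps' action added, the PDF builder runs roughly:
    #   gs -dNOPAUSE -dBATCH -sDEVICE=pdfwrite -sOutputFile=doc.pdf doc.ps
    env.PDF('doc.pdf', 'doc.ps')
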
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/hpc++.py b/tools/scons/scons-local-1.2.0/SCons/Tool/hpc++.py
new file mode 100644
index 000000000..299c701ed
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/hpc++.py
@@ -0,0 +1,79 @@
+"""SCons.Tool.hpc++
+
+Tool-specific initialization for c++ on HP/UX.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/hpc++.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import string
+
+import SCons.Util
+
+cplusplus = __import__('c++', globals(), locals(), [])
+
+acc = None
+
+# search for the acc compiler and linker front end
+
+try:
+ dirs = os.listdir('/opt')
+except (IOError, OSError):
+ # Not being able to read the directory because it doesn't exist
+ # (IOError) or isn't readable (OSError) is okay.
+ dirs = []
+
+for dir in dirs:
+ cc = '/opt/' + dir + '/bin/aCC'
+ if os.path.exists(cc):
+ acc = cc
+ break
+
+
+def generate(env):
+ """Add Builders and construction variables for g++ to an Environment."""
+ cplusplus.generate(env)
+
+ if acc:
+ env['CXX'] = acc or 'aCC'
+ env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z')
+ # determine version of aCC
+ line = os.popen(acc + ' -V 2>&1').readline().rstrip()
+ if string.find(line, 'aCC: HP ANSI C++') == 0:
+ env['CXXVERSION'] = string.split(line)[-1]
+
+ if env['PLATFORM'] == 'cygwin':
+ env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
+ else:
+ env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS +Z')
+
+def exists(env):
+ return acc
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/hpcc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/hpcc.py
new file mode 100644
index 000000000..a4da9568b
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/hpcc.py
@@ -0,0 +1,47 @@
+"""SCons.Tool.hpcc
+
+Tool-specific initialization for HP aCC and cc.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/hpcc.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+import cc
+
+def generate(env):
+ """Add Builders and construction variables for aCC & cc to an Environment."""
+ cc.generate(env)
+
+ env['CXX'] = 'aCC'
+ env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS +Z')
+
+def exists(env):
+ return env.Detect('aCC')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/hplink.py b/tools/scons/scons-local-1.2.0/SCons/Tool/hplink.py
new file mode 100644
index 000000000..0eb5b0a6b
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/hplink.py
@@ -0,0 +1,71 @@
+"""SCons.Tool.hplink
+
+Tool-specific initialization for the HP linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/hplink.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+
+import SCons.Util
+
+import link
+
+ccLinker = None
+
+# search for the acc compiler and linker front end
+
+try:
+ dirs = os.listdir('/opt')
+except (IOError, OSError):
+ # Not being able to read the directory because it doesn't exist
+ # (IOError) or isn't readable (OSError) is okay.
+ dirs = []
+
+for dir in dirs:
+ linker = '/opt/' + dir + '/bin/aCC'
+ if os.path.exists(linker):
+ ccLinker = linker
+ break
+
+def generate(env):
+ """
+ Add Builders and construction variables for the HP linker to
+ an Environment.
+ """
+ link.generate(env)
+
+ env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,+s -Wl,+vnocompatwarnings')
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -b')
+ env['SHLIBSUFFIX'] = '.sl'
+
+def exists(env):
+ return ccLinker
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/icc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/icc.py
new file mode 100644
index 000000000..ac6d6aade
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/icc.py
@@ -0,0 +1,53 @@
+"""engine.SCons.Tool.icc
+
+Tool-specific initialization for the OS/2 icc compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/icc.py 3842 2008/12/20 22:59:52 scons"
+
+import cc
+
+def generate(env):
+ """Add Builders and construction variables for the OS/2 to an Environment."""
+ cc.generate(env)
+
+ env['CC'] = 'icc'
+ env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET'
+ env['CXXCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo$TARGET'
+ env['CPPDEFPREFIX'] = '/D'
+ env['CPPDEFSUFFIX'] = ''
+ env['INCPREFIX'] = '/I'
+ env['INCSUFFIX'] = ''
+ env['CFILESUFFIX'] = '.c'
+ env['CXXFILESUFFIX'] = '.cc'
+
+def exists(env):
+ return env.Detect('icc')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/icl.py b/tools/scons/scons-local-1.2.0/SCons/Tool/icl.py
new file mode 100644
index 000000000..322de7935
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/icl.py
@@ -0,0 +1,46 @@
+"""engine.SCons.Tool.icl
+
+Tool-specific initialization for the Intel C/C++ compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/icl.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Tool.intelc
+
+# This has been completely superseded by intelc.py, which can
+# handle both Windows and Linux versions.
+
+def generate(*args, **kw):
+ """Add Builders and construction variables for icl to an Environment."""
+ return apply(SCons.Tool.intelc.generate, args, kw)
+
+def exists(*args, **kw):
+ return apply(SCons.Tool.intelc.exists, args, kw)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/ifl.py b/tools/scons/scons-local-1.2.0/SCons/Tool/ifl.py
new file mode 100644
index 000000000..bfb157e6e
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/ifl.py
@@ -0,0 +1,66 @@
+"""SCons.Tool.ifl
+
+Tool-specific initialization for the Intel Fortran compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/ifl.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+from SCons.Scanner.Fortran import FortranScan
+from FortranCommon import add_all_to_env
+
+def generate(env):
+ """Add Builders and construction variables for ifl to an Environment."""
+ fscan = FortranScan("FORTRANPATH")
+ SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
+ SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
+
+ if not env.has_key('FORTRANFILESUFFIXES'):
+ env['FORTRANFILESUFFIXES'] = ['.i']
+ else:
+ env['FORTRANFILESUFFIXES'].append('.i')
+
+ if not env.has_key('F90FILESUFFIXES'):
+ env['F90FILESUFFIXES'] = ['.i90']
+ else:
+ env['F90FILESUFFIXES'].append('.i90')
+
+ add_all_to_env(env)
+
+ env['FORTRAN'] = 'ifl'
+ env['SHFORTRAN'] = '$FORTRAN'
+ env['FORTRANCOM'] = '$FORTRAN $FORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
+ env['FORTRANPPCOM'] = '$FORTRAN $FORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
+ env['SHFORTRANCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
+ env['SHFORTRANPPCOM'] = '$SHFORTRAN $SHFORTRANFLAGS $CPPFLAGS $_CPPDEFFLAGS $_FORTRANINCFLAGS /c $SOURCES /Fo$TARGET'
+
+def exists(env):
+ return env.Detect('ifl')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/ifort.py b/tools/scons/scons-local-1.2.0/SCons/Tool/ifort.py
new file mode 100644
index 000000000..17b7bf7b3
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/ifort.py
@@ -0,0 +1,83 @@
+"""SCons.Tool.ifort
+
+Tool-specific initialization for newer versions of the Intel Fortran Compiler
+for Linux/Windows (and possibly Mac OS X).
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/ifort.py 3842 2008/12/20 22:59:52 scons"
+
+import string
+
+import SCons.Defaults
+from SCons.Scanner.Fortran import FortranScan
+from FortranCommon import add_all_to_env
+
+def generate(env):
+ """Add Builders and construction variables for ifort to an Environment."""
+ # ifort supports Fortran 90 and Fortran 95
+ # Additionally, ifort recognizes more file extensions.
+ fscan = FortranScan("FORTRANPATH")
+ SCons.Tool.SourceFileScanner.add_scanner('.i', fscan)
+ SCons.Tool.SourceFileScanner.add_scanner('.i90', fscan)
+
+ if not env.has_key('FORTRANFILESUFFIXES'):
+ env['FORTRANFILESUFFIXES'] = ['.i']
+ else:
+ env['FORTRANFILESUFFIXES'].append('.i')
+
+ if not env.has_key('F90FILESUFFIXES'):
+ env['F90FILESUFFIXES'] = ['.i90']
+ else:
+ env['F90FILESUFFIXES'].append('.i90')
+
+ add_all_to_env(env)
+
+ fc = 'ifort'
+
+ for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
+ env['%s' % dialect] = fc
+ env['SH%s' % dialect] = '$%s' % dialect
+ env['SH%sFLAGS' % dialect] = SCons.Util.CLVar('$%sFLAGS -fPIC' % dialect)
+
+ if env['PLATFORM'] == 'win32':
+ # On Windows, the ifort compiler specifies the object on the
+ # command line with -object:, not -o. Massage the necessary
+ # command-line construction variables.
+ for dialect in ['F77', 'F90', 'FORTRAN', 'F95']:
+ for var in ['%sCOM' % dialect, '%sPPCOM' % dialect,
+ 'SH%sCOM' % dialect, 'SH%sPPCOM' % dialect]:
+ env[var] = string.replace(env[var], '-o $TARGET', '-object:$TARGET')
+ env['FORTRANMODDIRPREFIX'] = "/module:"
+ else:
+ env['FORTRANMODDIRPREFIX'] = "-module "
+
+def exists(env):
+ return env.Detect('ifort')
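
On win32 the generate() above rewrites every Fortran command template so the object file is named with -object: instead of -o. A standalone sketch of that single substitution, applied to an invented template of the same '-o $TARGET' shape (the real $F90COM and friends come from FortranCommon):

    import string

    com = '$F90 -o $TARGET -c $F90FLAGS $_F90INCFLAGS $SOURCES'   # illustrative template only
    print string.replace(com, '-o $TARGET', '-object:$TARGET')
    # -> '$F90 -object:$TARGET -c $F90FLAGS $_F90INCFLAGS $SOURCES'
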
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/ilink.py b/tools/scons/scons-local-1.2.0/SCons/Tool/ilink.py
new file mode 100644
index 000000000..b443a6b68
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/ilink.py
@@ -0,0 +1,53 @@
+"""SCons.Tool.ilink
+
+Tool-specific initialization for the OS/2 ilink linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/ilink.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+def generate(env):
+ """Add Builders and construction variables for ilink to an Environment."""
+ SCons.Tool.createProgBuilder(env)
+
+ env['LINK'] = 'ilink'
+ env['LINKFLAGS'] = SCons.Util.CLVar('')
+ env['LINKCOM'] = '$LINK $LINKFLAGS /O:$TARGET $SOURCES $( $_LIBDIRFLAGS $) $_LIBFLAGS'
+ env['LIBDIRPREFIX']='/LIBPATH:'
+ env['LIBDIRSUFFIX']=''
+ env['LIBLINKPREFIX']=''
+ env['LIBLINKSUFFIX']='$LIBSUFFIX'
+
+def exists(env):
+ return env.Detect('ilink')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/ilink32.py b/tools/scons/scons-local-1.2.0/SCons/Tool/ilink32.py
new file mode 100644
index 000000000..f357bec67
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/ilink32.py
@@ -0,0 +1,54 @@
+"""SCons.Tool.ilink32
+
+Tool-specific initialization for the Borland ilink32 linker.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/ilink32.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Tool
+import SCons.Tool.bcc32
+import SCons.Util
+
+def generate(env):
+ """Add Builders and construction variables for ilink to an
+ Environment."""
+ SCons.Tool.createSharedLibBuilder(env)
+ SCons.Tool.createProgBuilder(env)
+
+ env['LINK'] = '$CC'
+ env['LINKFLAGS'] = SCons.Util.CLVar('')
+ env['LINKCOM'] = '$LINK -q $LINKFLAGS $SOURCES $LIBS'
+ env['LIBDIRPREFIX']=''
+ env['LIBDIRSUFFIX']=''
+ env['LIBLINKPREFIX']=''
+ env['LIBLINKSUFFIX']='$LIBSUFFIX'
+
+
+def exists(env):
+ # Uses bcc32 to do linking as it generally knows where the standard
+ # LIBS are and set up the linking correctly
+ return SCons.Tool.bcc32.findIt('bcc32', env)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/install.py b/tools/scons/scons-local-1.2.0/SCons/Tool/install.py
new file mode 100644
index 000000000..be36be08c
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/install.py
@@ -0,0 +1,223 @@
+"""SCons.Tool.install
+
+Tool-specific initialization for the install tool.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/install.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import shutil
+import stat
+
+import SCons.Action
+from SCons.Util import make_path_relative
+
+#
+# We keep track of *all* installed files.
+_INSTALLED_FILES = []
+_UNIQUE_INSTALLED_FILES = None
+
+#
+# Functions doing the actual work of the Install Builder.
+#
+def copyFunc(dest, source, env):
+ """Install a source file or directory into a destination by copying,
+ (including copying permission/mode bits)."""
+
+ if os.path.isdir(source):
+ if os.path.exists(dest):
+ if not os.path.isdir(dest):
+ raise SCons.Errors.UserError, "cannot overwrite non-directory `%s' with a directory `%s'" % (str(dest), str(source))
+ else:
+ parent = os.path.split(dest)[0]
+ if not os.path.exists(parent):
+ os.makedirs(parent)
+ shutil.copytree(source, dest)
+ else:
+ shutil.copy2(source, dest)
+ st = os.stat(source)
+ os.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE)
+
+ return 0
+
+def installFunc(target, source, env):
+ """Install a source file into a target using the function specified
+ as the INSTALL construction variable."""
+ try:
+ install = env['INSTALL']
+ except KeyError:
+ raise SCons.Errors.UserError('Missing INSTALL construction variable.')
+
+ assert len(target)==len(source), \
+ "Installing source %s into target %s: target and source lists must have same length."%(map(str, source), map(str, target))
+ for t,s in zip(target,source):
+ if install(t.get_path(),s.get_path(),env):
+ return 1
+
+ return 0
+
+def stringFunc(target, source, env):
+ installstr = env.get('INSTALLSTR')
+ if installstr:
+ return env.subst_target_source(installstr, 0, target, source)
+ target = str(target[0])
+ source = str(source[0])
+ if os.path.isdir(source):
+ type = 'directory'
+ else:
+ type = 'file'
+ return 'Install %s: "%s" as "%s"' % (type, source, target)
+
+#
+# Emitter functions
+#
+def add_targets_to_INSTALLED_FILES(target, source, env):
+ """ an emitter that adds all target files to the list stored in the
+ _INSTALLED_FILES global variable. This way all installed files of one
+ scons call will be collected.
+ """
+ global _INSTALLED_FILES, _UNIQUE_INSTALLED_FILES
+ _INSTALLED_FILES.extend(target)
+ _UNIQUE_INSTALLED_FILES = None
+ return (target, source)
+
+class DESTDIR_factory:
+ """ a node factory, where all files will be relative to the dir supplied
+ in the constructor.
+ """
+ def __init__(self, env, dir):
+ self.env = env
+ self.dir = env.arg2nodes( dir, env.fs.Dir )[0]
+
+ def Entry(self, name):
+ name = make_path_relative(name)
+ return self.dir.Entry(name)
+
+ def Dir(self, name):
+ name = make_path_relative(name)
+ return self.dir.Dir(name)
+
+#
+# The Builder Definition
+#
+install_action = SCons.Action.Action(installFunc, stringFunc)
+installas_action = SCons.Action.Action(installFunc, stringFunc)
+
+BaseInstallBuilder = None
+
+def InstallBuilderWrapper(env, target=None, source=None, dir=None, **kw):
+ if target and dir:
+ import SCons.Errors
+ raise SCons.Errors.UserError, "Both target and dir defined for Install(), only one may be defined."
+ if not dir:
+ dir=target
+
+ import SCons.Script
+ install_sandbox = SCons.Script.GetOption('install_sandbox')
+ if install_sandbox:
+ target_factory = DESTDIR_factory(env, install_sandbox)
+ else:
+ target_factory = env.fs
+
+ try:
+ dnodes = env.arg2nodes(dir, target_factory.Dir)
+ except TypeError:
+ raise SCons.Errors.UserError, "Target `%s' of Install() is a file, but should be a directory. Perhaps you have the Install() arguments backwards?" % str(dir)
+ sources = env.arg2nodes(source, env.fs.Entry)
+ tgt = []
+ for dnode in dnodes:
+ for src in sources:
+ # Prepend './' so the lookup doesn't interpret an initial
+ # '#' on the file name portion as meaning the Node should
+ # be relative to the top-level SConstruct directory.
+ target = env.fs.Entry('.'+os.sep+src.name, dnode)
+ #tgt.extend(BaseInstallBuilder(env, target, src, **kw))
+ tgt.extend(apply(BaseInstallBuilder, (env, target, src), kw))
+ return tgt
+
+def InstallAsBuilderWrapper(env, target=None, source=None, **kw):
+ result = []
+ for src, tgt in map(lambda x, y: (x, y), source, target):
+ #result.extend(BaseInstallBuilder(env, tgt, src, **kw))
+ result.extend(apply(BaseInstallBuilder, (env, tgt, src), kw))
+ return result
+
+added = None
+
+def generate(env):
+
+ from SCons.Script import AddOption, GetOption
+ global added
+ if not added:
+ added = 1
+ AddOption('--install-sandbox',
+ dest='install_sandbox',
+ type="string",
+ action="store",
+ help='A directory under which all installed files will be placed.')
+
+ global BaseInstallBuilder
+ if BaseInstallBuilder is None:
+ install_sandbox = GetOption('install_sandbox')
+ if install_sandbox:
+ target_factory = DESTDIR_factory(env, install_sandbox)
+ else:
+ target_factory = env.fs
+
+ BaseInstallBuilder = SCons.Builder.Builder(
+ action = install_action,
+ target_factory = target_factory.Entry,
+ source_factory = env.fs.Entry,
+ multi = 1,
+ emitter = [ add_targets_to_INSTALLED_FILES, ],
+ name = 'InstallBuilder')
+
+ env['BUILDERS']['_InternalInstall'] = InstallBuilderWrapper
+ env['BUILDERS']['_InternalInstallAs'] = InstallAsBuilderWrapper
+
+ # We'd like to initialize this doing something like the following,
+ # but there isn't yet support for a ${SOURCE.type} expansion that
+ # will print "file" or "directory" depending on what's being
+ # installed. For now we punt by not initializing it, and letting
+ # the stringFunc() that we put in the action fall back to the
+ # hand-crafted default string if it's not set.
+ #
+ #try:
+ # env['INSTALLSTR']
+ #except KeyError:
+ # env['INSTALLSTR'] = 'Install ${SOURCE.type}: "$SOURCES" as "$TARGETS"'
+
+ try:
+ env['INSTALL']
+ except KeyError:
+ env['INSTALL'] = copyFunc
+
+def exists(env):
+ return 1
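
install.py exposes the shared BaseInstallBuilder through the _InternalInstall and _InternalInstallAs builders (which back the Environment Install() and InstallAs() methods) and registers the --install-sandbox option, whose DESTDIR_factory reroots every installed path under the chosen directory. A usage sketch with hypothetical paths:

    # SConstruct fragment -- sketch only
    env = Environment()
    prog = env.Program('hello', 'hello.c')
    installed = env.Install('/usr/local/bin', prog)        # forwards to _InternalInstall
    env.InstallAs('/usr/local/bin/hi', prog)               # forwards to _InternalInstallAs
    Default(installed)
    # Running `scons --install-sandbox=./destdir` makes DESTDIR_factory strip the
    # leading '/' and place the copy under ./destdir/usr/local/bin instead.
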
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/intelc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/intelc.py
new file mode 100644
index 000000000..dfdedc4ab
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/intelc.py
@@ -0,0 +1,482 @@
+"""SCons.Tool.icl
+
+Tool-specific initialization for the Intel C/C++ compiler.
+Supports Linux and Windows compilers, v7 and up.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/intelc.py 3842 2008/12/20 22:59:52 scons"
+
+import math, sys, os.path, glob, string, re
+
+is_windows = sys.platform == 'win32'
+is_win64 = is_windows and (os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64' or
+ (os.environ.has_key('PROCESSOR_ARCHITEW6432') and
+ os.environ['PROCESSOR_ARCHITEW6432'] == 'AMD64'))
+is_linux = sys.platform == 'linux2'
+is_mac = sys.platform == 'darwin'
+
+if is_windows:
+ import SCons.Tool.msvc
+elif is_linux:
+ import SCons.Tool.gcc
+elif is_mac:
+ import SCons.Tool.gcc
+import SCons.Errors # for the IntelCError exception classes defined below
+import SCons.Util
+import SCons.Warnings
+
+# Exceptions for this tool
+class IntelCError(SCons.Errors.InternalError):
+ pass
+class MissingRegistryError(IntelCError): # missing registry entry
+ pass
+class MissingDirError(IntelCError): # dir not found
+ pass
+class NoRegistryModuleError(IntelCError): # can't read registry at all
+ pass
+
+def uniquify(s):
+ """Return a sequence containing only one copy of each unique element from input sequence s.
+ Does not preserve order.
+ Input sequence must be hashable (i.e. must be usable as a dictionary key)."""
+ u = {}
+ for x in s:
+ u[x] = 1
+ return u.keys()
+
+def linux_ver_normalize(vstr):
+ """Normalize a Linux compiler version number.
+ Intel changed from "80" to "9.0" in 2005, so we assume if the number
+ is greater than 60 it's an old-style number and otherwise new-style.
+ Always returns an old-style float like 80 or 90 for compatibility with Windows.
+ Shades of Y2K!"""
+ # Check for version number like 9.1.026: return 91.026
+ m = re.match(r'([0-9]+)\.([0-9]+)\.([0-9]+)', vstr)
+ if m:
+ vmaj,vmin,build = m.groups()
+ return float(vmaj) * 10 + float(vmin) + float(build) / 1000.
+ else:
+ f = float(vstr)
+ if is_windows:
+ return f
+ else:
+ if f < 60: return f * 10.0
+ else: return f
+
+def check_abi(abi):
+ """Check for valid ABI (application binary interface) name,
+ and map into canonical one"""
+ if not abi:
+ return None
+ abi = abi.lower()
+ # valid_abis maps input name to canonical name
+ if is_windows:
+ valid_abis = {'ia32' : 'ia32',
+ 'x86' : 'ia32',
+ 'ia64' : 'ia64',
+ 'em64t' : 'em64t',
+ 'amd64' : 'em64t'}
+ if is_linux:
+ valid_abis = {'ia32' : 'ia32',
+ 'x86' : 'ia32',
+ 'x86_64' : 'x86_64',
+ 'em64t' : 'x86_64',
+ 'amd64' : 'x86_64'}
+ if is_mac:
+ valid_abis = {'ia32' : 'ia32',
+ 'x86' : 'ia32',
+ 'x86_64' : 'x86_64',
+ 'em64t' : 'x86_64'}
+ try:
+ abi = valid_abis[abi]
+ except KeyError:
+ raise SCons.Errors.UserError, \
+ "Intel compiler: Invalid ABI %s, valid values are %s"% \
+ (abi, valid_abis.keys())
+ return abi
+
+def vercmp(a, b):
+ """Compare strings as floats,
+ but Intel changed Linux naming convention at 9.0"""
+ return cmp(linux_ver_normalize(b), linux_ver_normalize(a))
+
+def get_version_from_list(v, vlist):
+ """See if we can match v (string) in vlist (list of strings)
+ Linux has to match in a fuzzy way."""
+ if is_windows:
+ # Simple case, just find it in the list
+ if v in vlist: return v
+ else: return None
+ else:
+ # Fuzzy match: normalize version number first, but still return
+ # original non-normalized form.
+ fuzz = 0.001
+ for vi in vlist:
+ if math.fabs(linux_ver_normalize(vi) - linux_ver_normalize(v)) < fuzz:
+ return vi
+ # Not found
+ return None
+
+def get_intel_registry_value(valuename, version=None, abi=None):
+ """
+ Return a value from the Intel compiler registry tree. (Windows only)
+ """
+ # Open the key:
+ if is_win64:
+ K = 'Software\\Wow6432Node\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper()
+ else:
+ K = 'Software\\Intel\\Compilers\\C++\\' + version + '\\'+abi.upper()
+ try:
+ k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K)
+ except SCons.Util.RegError:
+ raise MissingRegistryError, \
+ "%s was not found in the registry, for Intel compiler version %s, abi='%s'"%(K, version,abi)
+
+ # Get the value:
+ try:
+ v = SCons.Util.RegQueryValueEx(k, valuename)[0]
+ return v # or v.encode('iso-8859-1', 'replace') to remove unicode?
+ except SCons.Util.RegError:
+ raise MissingRegistryError, \
+ "%s\\%s was not found in the registry."%(K, valuename)
+
+
+def get_all_compiler_versions():
+ """Returns a sorted list of strings, like "70" or "80" or "9.0"
+ with most recent compiler version first.
+ """
+ versions=[]
+ if is_windows:
+ if is_win64:
+ keyname = 'Software\\WoW6432Node\\Intel\\Compilers\\C++'
+ else:
+ keyname = 'Software\\Intel\\Compilers\\C++'
+ try:
+ k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
+ keyname)
+ except WindowsError:
+ return []
+ i = 0
+ versions = []
+ try:
+ while i < 100:
+ subkey = SCons.Util.RegEnumKey(k, i) # raises EnvironmentError
+ # Check that this refers to an existing dir.
+ # This is not 100% perfect but should catch common
+ # installation issues like when the compiler was installed
+ # and then the install directory deleted or moved (rather
+ # than uninstalling properly), so the registry values
+ # are still there.
+ ok = False
+ for try_abi in ('IA32', 'IA32e', 'IA64', 'EM64T'):
+ try:
+ d = get_intel_registry_value('ProductDir', subkey, try_abi)
+ except MissingRegistryError:
+ continue # not found in reg, keep going
+ if os.path.exists(d): ok = True
+ if ok:
+ versions.append(subkey)
+ else:
+ try:
+ # Registry points to nonexistent dir. Ignore this
+ # version.
+ value = get_intel_registry_value('ProductDir', subkey, 'IA32')
+ except MissingRegistryError, e:
+
+ # Registry key is left dangling (potentially
+ # after uninstalling).
+
+ print \
+ "scons: *** Ignoring the registry key for the Intel compiler version %s.\n" \
+ "scons: *** It seems that the compiler was uninstalled and that the registry\n" \
+ "scons: *** was not cleaned up properly.\n" % subkey
+ else:
+ print "scons: *** Ignoring "+str(value)
+
+ i = i + 1
+ except EnvironmentError:
+ # no more subkeys
+ pass
+ elif is_linux:
+ for d in glob.glob('/opt/intel_cc_*'):
+ # Typical dir here is /opt/intel_cc_80.
+ m = re.search(r'cc_(.*)$', d)
+ if m:
+ versions.append(m.group(1))
+ for d in glob.glob('/opt/intel/cc*/*'):
+ # Typical dir here is /opt/intel/cc/9.0 for IA32,
+ # /opt/intel/cce/9.0 for EMT64 (AMD64)
+ m = re.search(r'([0-9.]+)$', d)
+ if m:
+ versions.append(m.group(1))
+ elif is_mac:
+ for d in glob.glob('/opt/intel/cc*/*'):
+ # Typical dir here is /opt/intel/cc/9.0 for IA32,
+ # /opt/intel/cce/9.0 for EMT64 (AMD64)
+ m = re.search(r'([0-9.]+)$', d)
+ if m:
+ versions.append(m.group(1))
+ versions = uniquify(versions) # remove dups
+ versions.sort(vercmp)
+ return versions
+
+def get_intel_compiler_top(version, abi):
+ """
+ Return the main path to the top-level dir of the Intel compiler,
+ using the given version.
+ The compiler will be in <top>/bin/icl.exe (icc on linux),
+ the include dir is <top>/include, etc.
+ """
+
+ if is_windows:
+ if not SCons.Util.can_read_reg:
+ raise NoRegistryModuleError, "No Windows registry module was found"
+ top = get_intel_registry_value('ProductDir', version, abi)
+ if not os.path.exists(os.path.join(top, "Bin", "icl.exe")):
+ raise MissingDirError, \
+ "Can't find Intel compiler in %s"%(top)
+ elif is_mac or is_linux:
+ # first dir is new (>=9.0) style, second is old (8.0) style.
+ dirs=('/opt/intel/cc/%s', '/opt/intel_cc_%s')
+ if abi == 'x86_64':
+ dirs=('/opt/intel/cce/%s',) # 'e' stands for 'em64t', aka x86_64 aka amd64
+ top=None
+ for d in dirs:
+ if os.path.exists(os.path.join(d%version, "bin", "icc")):
+ top = d%version
+ break
+ if not top:
+ raise MissingDirError, \
+ "Can't find version %s Intel compiler in %s (abi='%s')"%(version,top, abi)
+ return top
+
+
+def generate(env, version=None, abi=None, topdir=None, verbose=0):
+ """Add Builders and construction variables for Intel C/C++ compiler
+ to an Environment.
+ args:
+ version: (string) compiler version to use, like "80"
+ abi: (string) application binary interface: 'ia32', 'ia64', 'em64t' or 'x86_64' (or a synonym accepted by check_abi above)
+ topdir: (string) compiler top dir, like
+ "c:\Program Files\Intel\Compiler70"
+ If topdir is used, version and abi are ignored.
+ verbose: (int) if >0, prints compiler version used.
+ """
+ if not (is_mac or is_linux or is_windows):
+ # can't handle this platform
+ return
+
+ if is_windows:
+ SCons.Tool.msvc.generate(env)
+ elif is_linux:
+ SCons.Tool.gcc.generate(env)
+ elif is_mac:
+ SCons.Tool.gcc.generate(env)
+
+ # if version is unspecified, use latest
+ vlist = get_all_compiler_versions()
+ if not version:
+ if vlist:
+ version = vlist[0]
+ else:
+ # User may have specified '90' but we need to get actual dirname '9.0'.
+ # get_version_from_list does that mapping.
+ v = get_version_from_list(version, vlist)
+ if not v:
+ raise SCons.Errors.UserError, \
+ "Invalid Intel compiler version %s: "%version + \
+ "installed versions are %s"%(', '.join(vlist))
+ version = v
+
+ # if abi is unspecified, use ia32
+ # alternatives are ia64 for Itanium, or amd64 or em64t or x86_64 (all synonyms here)
+ abi = check_abi(abi)
+ if abi is None:
+ if is_mac or is_linux:
+ # Check if we are on 64-bit linux, default to 64 then.
+ uname_m = os.uname()[4]
+ if uname_m == 'x86_64':
+ abi = 'x86_64'
+ else:
+ abi = 'ia32'
+ else:
+ if is_win64:
+ abi = 'em64t'
+ else:
+ abi = 'ia32'
+
+ if version and not topdir:
+ try:
+ topdir = get_intel_compiler_top(version, abi)
+ except (SCons.Util.RegError, IntelCError):
+ topdir = None
+
+ if not topdir:
+ # Normally this is an error, but it might not be if the compiler is
+ # on $PATH and the user is importing their env.
+ class ICLTopDirWarning(SCons.Warnings.Warning):
+ pass
+ if (is_mac or is_linux) and not env.Detect('icc') or \
+ is_windows and not env.Detect('icl'):
+
+ SCons.Warnings.enableWarningClass(ICLTopDirWarning)
+ SCons.Warnings.warn(ICLTopDirWarning,
+ "Failed to find Intel compiler for version='%s', abi='%s'"%
+ (str(version), str(abi)))
+ else:
+ # should be cleaned up to say what this other version is
+ # since in this case we have some other Intel compiler installed
+ SCons.Warnings.enableWarningClass(ICLTopDirWarning)
+ SCons.Warnings.warn(ICLTopDirWarning,
+ "Can't find Intel compiler top dir for version='%s', abi='%s'"%
+ (str(version), str(abi)))
+
+ if topdir:
+ if verbose:
+ print "Intel C compiler: using version %s (%g), abi %s, in '%s'"%\
+ (repr(version), linux_ver_normalize(version),abi,topdir)
+ if is_linux:
+ # Show the actual compiler version by running the compiler.
+ os.system('%s/bin/icc --version'%topdir)
+ if is_mac:
+ # Show the actual compiler version by running the compiler.
+ os.system('%s/bin/icc --version'%topdir)
+
+ env['INTEL_C_COMPILER_TOP'] = topdir
+ if is_linux:
+ paths={'INCLUDE' : 'include',
+ 'LIB' : 'lib',
+ 'PATH' : 'bin',
+ 'LD_LIBRARY_PATH' : 'lib'}
+ for p in paths.keys():
+ env.PrependENVPath(p, os.path.join(topdir, paths[p]))
+ if is_mac:
+ paths={'INCLUDE' : 'include',
+ 'LIB' : 'lib',
+ 'PATH' : 'bin',
+ 'LD_LIBRARY_PATH' : 'lib'}
+ for p in paths.keys():
+ env.PrependENVPath(p, os.path.join(topdir, paths[p]))
+ if is_windows:
+ # env key reg valname default subdir of top
+ paths=(('INCLUDE', 'IncludeDir', 'Include'),
+ ('LIB' , 'LibDir', 'Lib'),
+ ('PATH' , 'BinDir', 'Bin'))
+ # We are supposed to ignore version if topdir is set, so set
+ # it to the empty string if it's not already set.
+ if version is None:
+ version = ''
+ # Each path has a registry entry, use that or default to subdir
+ for p in paths:
+ try:
+ path=get_intel_registry_value(p[1], version, abi)
+ # These paths may have $(ICInstallDir)
+ # which needs to be substituted with the topdir.
+ path=path.replace('$(ICInstallDir)', topdir + os.sep)
+ except IntelCError:
+ # Couldn't get it from registry: use default subdir of topdir
+ env.PrependENVPath(p[0], os.path.join(topdir, p[2]))
+ else:
+ env.PrependENVPath(p[0], string.split(path, os.pathsep))
+ # print "ICL %s: %s, final=%s"%(p[0], path, str(env['ENV'][p[0]]))
+
+ if is_windows:
+ env['CC'] = 'icl'
+ env['CXX'] = 'icl'
+ env['LINK'] = 'xilink'
+ else:
+ env['CC'] = 'icc'
+ env['CXX'] = 'icpc'
+ # Don't reset LINK here;
+ # use smart_link which should already be here from link.py.
+ #env['LINK'] = '$CC'
+ env['AR'] = 'xiar'
+ env['LD'] = 'xild' # not used by default
+
+ # This is not the exact (detailed) compiler version,
+ # just the major version as determined above or specified
+ # by the user. It is a float like 80 or 90, in normalized form for Linux
+ # (i.e. even for Linux 9.0 compiler, still returns 90 rather than 9.0)
+ if version:
+ env['INTEL_C_COMPILER_VERSION']=linux_ver_normalize(version)
+
+ if is_windows:
+ # Look for license file dir
+ # in system environment, registry, and default location.
+ envlicdir = os.environ.get("INTEL_LICENSE_FILE", '')
+ K = ('SOFTWARE\Intel\Licenses')
+ try:
+ k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE, K)
+ reglicdir = SCons.Util.RegQueryValueEx(k, "w_cpp")[0]
+ except (AttributeError, SCons.Util.RegError):
+ reglicdir = ""
+ defaultlicdir = r'C:\Program Files\Common Files\Intel\Licenses'
+
+ licdir = None
+ for ld in [envlicdir, reglicdir]:
+ # If the string contains an '@', then assume it's a network
+ # license (port@system) and good by definition.
+ if ld and (string.find(ld, '@') != -1 or os.path.exists(ld)):
+ licdir = ld
+ break
+ if not licdir:
+ licdir = defaultlicdir
+ if not os.path.exists(licdir):
+ class ICLLicenseDirWarning(SCons.Warnings.Warning):
+ pass
+ SCons.Warnings.enableWarningClass(ICLLicenseDirWarning)
+ SCons.Warnings.warn(ICLLicenseDirWarning,
+ "Intel license dir was not found."
+ " Tried using the INTEL_LICENSE_FILE environment variable (%s), the registry (%s) and the default path (%s)."
+ " Using the default path as a last resort."
+ % (envlicdir, reglicdir, defaultlicdir))
+ env['ENV']['INTEL_LICENSE_FILE'] = licdir
+
+def exists(env):
+ if not (is_mac or is_linux or is_windows):
+ # can't handle this platform
+ return 0
+
+ try:
+ versions = get_all_compiler_versions()
+ except (SCons.Util.RegError, IntelCError):
+ versions = None
+ detected = versions is not None and len(versions) > 0
+ if not detected:
+ # try env.Detect, maybe that will work
+ if is_windows:
+ return env.Detect('icl')
+ elif is_linux:
+ return env.Detect('icc')
+ elif is_mac:
+ return env.Detect('icc')
+ return detected
+
+# end of file
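Per the generate() docstring above, a build can pin a specific compiler version and ABI; a sketch (the version string '9.0' and abi 'ia32' are example values and assume that compiler is actually installed):

    # SConstruct -- sketch only.
    env = Environment(tools=['default'])
    env.Tool('intelc', version='9.0', abi='ia32')   # validated against the installed versions found above
    env.Program('hello', ['hello.c'])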
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/jar.py b/tools/scons/scons-local-1.2.0/SCons/Tool/jar.py
new file mode 100644
index 000000000..be50b016d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/jar.py
@@ -0,0 +1,104 @@
+"""SCons.Tool.jar
+
+Tool-specific initialization for jar.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/jar.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Subst
+import SCons.Util
+
+def jarSources(target, source, env, for_signature):
+ """Only include sources that are not a manifest file."""
+ try:
+ env['JARCHDIR']
+ except KeyError:
+ jarchdir_set = False
+ else:
+ jarchdir_set = True
+ jarchdir = env.subst('$JARCHDIR', target=target, source=source)
+ if jarchdir:
+ jarchdir = env.fs.Dir(jarchdir)
+ result = []
+ for src in source:
+ contents = src.get_contents()
+ if contents[:16] != "Manifest-Version":
+ if jarchdir_set:
+ _chdir = jarchdir
+ else:
+ try:
+ _chdir = src.attributes.java_classdir
+ except AttributeError:
+ _chdir = None
+ if _chdir:
+ # If we are changing the dir with -C, then sources should
+ # be relative to that directory.
+ src = SCons.Subst.Literal(src.get_path(_chdir))
+ result.append('-C')
+ result.append(_chdir)
+ result.append(src)
+ return result
+
+def jarManifest(target, source, env, for_signature):
+ """Look in sources for a manifest file, if any."""
+ for src in source:
+ contents = src.get_contents()
+ if contents[:16] == "Manifest-Version":
+ return src
+ return ''
+
+def jarFlags(target, source, env, for_signature):
+ """If we have a manifest, make sure that the 'm'
+ flag is specified."""
+ jarflags = env.subst('$JARFLAGS', target=target, source=source)
+ for src in source:
+ contents = src.get_contents()
+ if contents[:16] == "Manifest-Version":
+ if not 'm' in jarflags:
+ return jarflags + 'm'
+ break
+ return jarflags
+
+def generate(env):
+ """Add Builders and construction variables for jar to an Environment."""
+ SCons.Tool.CreateJarBuilder(env)
+
+ env['JAR'] = 'jar'
+ env['JARFLAGS'] = SCons.Util.CLVar('cf')
+ env['_JARFLAGS'] = jarFlags
+ env['_JARMANIFEST'] = jarManifest
+ env['_JARSOURCES'] = jarSources
+ env['_JARCOM'] = '$JAR $_JARFLAGS $TARGET $_JARMANIFEST $_JARSOURCES'
+ env['JARCOM'] = "${TEMPFILE('$_JARCOM')}"
+ env['JARSUFFIX'] = '.jar'
+
+def exists(env):
+ return env.Detect('jar')
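A usage sketch for the Jar builder and the $JARCHDIR handling in jarSources() (directory and file names are placeholders; manifest.txt is assumed to start with a "Manifest-Version" line):

    # SConstruct -- sketch only.
    env = Environment(tools=['javac', 'jar'])
    classes = env.Java(target='classes', source='src')
    env.Jar(target='app.jar', source=classes + ['manifest.txt'],
            JARCHDIR='classes')
    # jarManifest() picks up manifest.txt by its Manifest-Version header,
    # jarFlags() appends the 'm' flag, and the class files are passed
    # relative to the -C classes directory.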
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/javac.py b/tools/scons/scons-local-1.2.0/SCons/Tool/javac.py
new file mode 100644
index 000000000..b8cabe89b
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/javac.py
@@ -0,0 +1,228 @@
+"""SCons.Tool.javac
+
+Tool-specific initialization for javac.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/javac.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+import string
+
+import SCons.Action
+import SCons.Builder
+from SCons.Node.FS import _my_normcase
+from SCons.Tool.JavaCommon import parse_java_file
+import SCons.Util
+
+def classname(path):
+ """Turn a string (path name) into a Java class name."""
+ return string.replace(os.path.normpath(path), os.sep, '.')
+
+def emit_java_classes(target, source, env):
+ """Create and return lists of source java files
+ and their corresponding target class files.
+ """
+ java_suffix = env.get('JAVASUFFIX', '.java')
+ class_suffix = env.get('JAVACLASSSUFFIX', '.class')
+
+ target[0].must_be_same(SCons.Node.FS.Dir)
+ classdir = target[0]
+
+ s = source[0].rentry().disambiguate()
+ if isinstance(s, SCons.Node.FS.File):
+ sourcedir = s.dir.rdir()
+ elif isinstance(s, SCons.Node.FS.Dir):
+ sourcedir = s.rdir()
+ else:
+ raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % s.__class__)
+
+ slist = []
+ js = _my_normcase(java_suffix)
+ find_java = lambda n, js=js, ljs=len(js): _my_normcase(n[-ljs:]) == js
+ for entry in source:
+ entry = entry.rentry().disambiguate()
+ if isinstance(entry, SCons.Node.FS.File):
+ slist.append(entry)
+ elif isinstance(entry, SCons.Node.FS.Dir):
+ result = SCons.Util.OrderedDict()
+ def visit(arg, dirname, names, fj=find_java, dirnode=entry.rdir()):
+ java_files = filter(fj, names)
+ # The on-disk entries come back in arbitrary order. Sort
+ # them so our target and source lists are determinate.
+ java_files.sort()
+ mydir = dirnode.Dir(dirname)
+ java_paths = map(lambda f, d=mydir: d.File(f), java_files)
+ for jp in java_paths:
+ arg[jp] = True
+
+ os.path.walk(entry.rdir().get_abspath(), visit, result)
+ entry.walk(visit, result)
+
+ slist.extend(result.keys())
+ else:
+ raise SCons.Errors.UserError("Java source must be File or Dir, not '%s'" % entry.__class__)
+
+ version = env.get('JAVAVERSION', '1.4')
+ full_tlist = []
+ for f in slist:
+ tlist = []
+ source_file_based = True
+ pkg_dir = None
+ if not f.is_derived():
+ pkg_dir, classes = parse_java_file(f.rfile().get_abspath(), version)
+ if classes:
+ source_file_based = False
+ if pkg_dir:
+ d = target[0].Dir(pkg_dir)
+ p = pkg_dir + os.sep
+ else:
+ d = target[0]
+ p = ''
+ for c in classes:
+ t = d.File(c + class_suffix)
+ t.attributes.java_classdir = classdir
+ t.attributes.java_sourcedir = sourcedir
+ t.attributes.java_classname = classname(p + c)
+ tlist.append(t)
+
+ if source_file_based:
+ base = f.name[:-len(java_suffix)]
+ if pkg_dir:
+ t = target[0].Dir(pkg_dir).File(base + class_suffix)
+ else:
+ t = target[0].File(base + class_suffix)
+ t.attributes.java_classdir = classdir
+ t.attributes.java_sourcedir = f.dir
+ t.attributes.java_classname = classname(base)
+ tlist.append(t)
+
+ for t in tlist:
+ t.set_specific_source([f])
+
+ full_tlist.extend(tlist)
+
+ return full_tlist, slist
+
+JavaAction = SCons.Action.Action('$JAVACCOM', '$JAVACCOMSTR')
+
+JavaBuilder = SCons.Builder.Builder(action = JavaAction,
+ emitter = emit_java_classes,
+ target_factory = SCons.Node.FS.Entry,
+ source_factory = SCons.Node.FS.Entry)
+
+class pathopt:
+ """
+ Callable object for generating javac-style path options from
+ a construction variable (e.g. -classpath, -sourcepath).
+ """
+ def __init__(self, opt, var, default=None):
+ self.opt = opt
+ self.var = var
+ self.default = default
+
+ def __call__(self, target, source, env, for_signature):
+ path = env[self.var]
+ if path and not SCons.Util.is_List(path):
+ path = [path]
+ if self.default:
+ path = path + [ env[self.default] ]
+ if path:
+ return [self.opt, string.join(path, os.pathsep)]
+ #return self.opt + " " + string.join(path, os.pathsep)
+ else:
+ return []
+ #return ""
+
+def Java(env, target, source, *args, **kw):
+ """
+ A pseudo-Builder wrapper around the separate JavaClass{File,Dir}
+ Builders.
+ """
+ if not SCons.Util.is_List(target):
+ target = [target]
+ if not SCons.Util.is_List(source):
+ source = [source]
+
+ # Pad the target list with repetitions of the last element in the
+ # list so we have a target for every source element.
+ target = target + ([target[-1]] * (len(source) - len(target)))
+
+ java_suffix = env.subst('$JAVASUFFIX')
+ result = []
+
+ for t, s in zip(target, source):
+ if isinstance(s, SCons.Node.FS.Base):
+ if isinstance(s, SCons.Node.FS.File):
+ b = env.JavaClassFile
+ else:
+ b = env.JavaClassDir
+ else:
+ if os.path.isfile(s):
+ b = env.JavaClassFile
+ elif os.path.isdir(s):
+ b = env.JavaClassDir
+ elif s[-len(java_suffix):] == java_suffix:
+ b = env.JavaClassFile
+ else:
+ b = env.JavaClassDir
+ result.extend(apply(b, (t, s) + args, kw))
+
+ return result
+
+def generate(env):
+ """Add Builders and construction variables for javac to an Environment."""
+ java_file = SCons.Tool.CreateJavaFileBuilder(env)
+ java_class = SCons.Tool.CreateJavaClassFileBuilder(env)
+ java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env)
+ java_class.add_emitter(None, emit_java_classes)
+ java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes)
+ java_class_dir.emitter = emit_java_classes
+
+ env.AddMethod(Java)
+
+ env['JAVAC'] = 'javac'
+ env['JAVACFLAGS'] = SCons.Util.CLVar('')
+ env['JAVABOOTCLASSPATH'] = []
+ env['JAVACLASSPATH'] = []
+ env['JAVASOURCEPATH'] = []
+ env['_javapathopt'] = pathopt
+ env['_JAVABOOTCLASSPATH'] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} '
+ env['_JAVACLASSPATH'] = '${_javapathopt("-classpath", "JAVACLASSPATH")} '
+ env['_JAVASOURCEPATH'] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} '
+ env['_JAVASOURCEPATHDEFAULT'] = '${TARGET.attributes.java_sourcedir}'
+ env['_JAVACCOM'] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES'
+ env['JAVACCOM'] = "${TEMPFILE('$_JAVACCOM')}"
+ env['JAVACLASSSUFFIX'] = '.class'
+ env['JAVASUFFIX'] = '.java'
+
+def exists(env):
+ return 1
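The Java() pseudo-builder above accepts files or directories and pads the target list to match the sources; a sketch (paths are placeholders):

    # SConstruct -- sketch only.
    env = Environment(tools=['javac'])
    classes = env.Java(target='build/classes', source=['src'])
    # emit_java_classes() scans src/ for *.java and derives one .class
    # target per class reported by parse_java_file().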
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/javah.py b/tools/scons/scons-local-1.2.0/SCons/Tool/javah.py
new file mode 100644
index 000000000..3a39aebca
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/javah.py
@@ -0,0 +1,132 @@
+"""SCons.Tool.javah
+
+Tool-specific initialization for javah.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/javah.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import string
+
+import SCons.Action
+import SCons.Builder
+import SCons.Node.FS
+import SCons.Tool.javac
+import SCons.Util
+
+def emit_java_headers(target, source, env):
+ """Create and return lists of Java stub header files that will
+ be created from a set of class files.
+ """
+ class_suffix = env.get('JAVACLASSSUFFIX', '.class')
+ classdir = env.get('JAVACLASSDIR')
+
+ if not classdir:
+ try:
+ s = source[0]
+ except IndexError:
+ classdir = '.'
+ else:
+ try:
+ classdir = s.attributes.java_classdir
+ except AttributeError:
+ classdir = '.'
+ classdir = env.Dir(classdir).rdir()
+
+ if str(classdir) == '.':
+ c_ = None
+ else:
+ c_ = str(classdir) + os.sep
+
+ slist = []
+ for src in source:
+ try:
+ classname = src.attributes.java_classname
+ except AttributeError:
+ classname = str(src)
+ if c_ and classname[:len(c_)] == c_:
+ classname = classname[len(c_):]
+ if class_suffix and classname[-len(class_suffix):] == class_suffix:
+ classname = classname[:-len(class_suffix)]
+ classname = SCons.Tool.javac.classname(classname)
+ s = src.rfile()
+ s.attributes.java_classname = classname
+ slist.append(s)
+
+ s = source[0].rfile()
+ if not hasattr(s.attributes, 'java_classdir'):
+ s.attributes.java_classdir = classdir
+
+ if target[0].__class__ is SCons.Node.FS.File:
+ tlist = target
+ else:
+ if not isinstance(target[0], SCons.Node.FS.Dir):
+ target[0].__class__ = SCons.Node.FS.Dir
+ target[0]._morph()
+ tlist = []
+ for s in source:
+ fname = string.replace(s.attributes.java_classname, '.', '_') + '.h'
+ t = target[0].File(fname)
+ t.attributes.java_lookupdir = target[0]
+ tlist.append(t)
+
+ return tlist, source
+
+def JavaHOutFlagGenerator(target, source, env, for_signature):
+ try:
+ t = target[0]
+ except (AttributeError, TypeError):
+ t = target
+ try:
+ return '-d ' + str(t.attributes.java_lookupdir)
+ except AttributeError:
+ return '-o ' + str(t)
+
+def getJavaHClassPath(env, target, source, for_signature):
+ path = "${SOURCE.attributes.java_classdir}"
+ if env.has_key('JAVACLASSPATH') and env['JAVACLASSPATH']:
+ path = SCons.Util.AppendPath(path, env['JAVACLASSPATH'])
+ return "-classpath %s" % (path)
+
+def generate(env):
+ """Add Builders and construction variables for javah to an Environment."""
+ java_javah = SCons.Tool.CreateJavaHBuilder(env)
+ java_javah.emitter = emit_java_headers
+
+ env['_JAVAHOUTFLAG'] = JavaHOutFlagGenerator
+ env['JAVAH'] = 'javah'
+ env['JAVAHFLAGS'] = SCons.Util.CLVar('')
+ env['_JAVAHCLASSPATH'] = getJavaHClassPath
+ env['JAVAHCOM'] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}'
+ env['JAVACLASSSUFFIX'] = '.class'
+
+def exists(env):
+ return env.Detect('javah')
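emit_java_headers() relies on the java_classname attribute that the javac emitter sets on each class node, so the two tools are normally used together; a sketch (names are placeholders):

    # SConstruct -- sketch only.
    env = Environment(tools=['javac', 'javah'])
    classes = env.Java(target='classes', source='src')
    env.JavaH(target='native', source=classes)
    # With a directory target, one header per class is emitted, named by
    # replacing '.' with '_' as in emit_java_headers() above.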
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/latex.py b/tools/scons/scons-local-1.2.0/SCons/Tool/latex.py
new file mode 100644
index 000000000..549f6d374
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/latex.py
@@ -0,0 +1,76 @@
+"""SCons.Tool.latex
+
+Tool-specific initialization for LaTeX.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/latex.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Defaults
+import SCons.Scanner.LaTeX
+import SCons.Util
+import SCons.Tool
+import SCons.Tool.tex
+
+LaTeXAction = None
+
+def LaTeXAuxFunction(target=None, source=None, env=None):
+ result = SCons.Tool.tex.InternalLaTeXAuxAction( LaTeXAction, target, source, env )
+ return result
+
+LaTeXAuxAction = SCons.Action.Action(LaTeXAuxFunction,
+ strfunction=SCons.Tool.tex.TeXLaTeXStrFunction)
+
+def generate(env):
+ """Add Builders and construction variables for LaTeX to an Environment."""
+ global LaTeXAction
+ if LaTeXAction is None:
+ LaTeXAction = SCons.Action.Action('$LATEXCOM', '$LATEXCOMSTR')
+
+ import dvi
+ dvi.generate(env)
+
+ import pdf
+ pdf.generate(env)
+
+ bld = env['BUILDERS']['DVI']
+ bld.add_action('.ltx', LaTeXAuxAction)
+ bld.add_action('.latex', LaTeXAuxAction)
+ bld.add_emitter('.ltx', SCons.Tool.tex.tex_eps_emitter)
+ bld.add_emitter('.latex', SCons.Tool.tex.tex_eps_emitter)
+
+ env['LATEX'] = 'latex'
+ env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode')
+ env['LATEXCOM'] = 'cd ${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}'
+ env['LATEXRETRIES'] = 3
+
+def exists(env):
+ return env.Detect('latex')
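A sketch of the resulting DVI builder hook-up (file names are placeholders; latex must be installed for the build to run):

    # SConstruct -- sketch only.
    env = Environment(tools=['latex'])
    env.DVI(target='report.dvi', source='report.ltx')
    # LaTeXAuxAction defers to InternalLaTeXAuxAction, which reruns latex
    # (up to $LATEXRETRIES passes) to settle cross-references.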
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/lex.py b/tools/scons/scons-local-1.2.0/SCons/Tool/lex.py
new file mode 100644
index 000000000..f2e0e856d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/lex.py
@@ -0,0 +1,93 @@
+"""SCons.Tool.lex
+
+Tool-specific initialization for lex.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/lex.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+
+import string
+
+import SCons.Action
+import SCons.Tool
+import SCons.Util
+
+LexAction = SCons.Action.Action("$LEXCOM", "$LEXCOMSTR")
+
+def lexEmitter(target, source, env):
+ sourceBase, sourceExt = os.path.splitext(SCons.Util.to_String(source[0]))
+
+ if sourceExt == ".lm": # If using Objective-C
+ target = [sourceBase + ".m"] # the extension is ".m".
+
+ # This emitter essentially tries to add to the target all extra
+ # files generated by flex.
+
+ # Different options that are used to trigger the creation of extra files.
+ fileGenOptions = ["--header-file=", "--tables-file="]
+
+ lexflags = env.subst("$LEXFLAGS", target=target, source=source)
+ for option in SCons.Util.CLVar(lexflags):
+ for fileGenOption in fileGenOptions:
+ l = len(fileGenOption)
+ if option[:l] == fileGenOption:
+ # A file generating option is present, so add the
+ # file name to the target list.
+ fileName = string.strip(option[l:])
+ target.append(fileName)
+ return (target, source)
+
+def generate(env):
+ """Add Builders and construction variables for lex to an Environment."""
+ c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
+
+ # C
+ c_file.add_action(".l", LexAction)
+ c_file.add_emitter(".l", lexEmitter)
+
+ c_file.add_action(".lex", LexAction)
+ c_file.add_emitter(".lex", lexEmitter)
+
+ # Objective-C
+ cxx_file.add_action(".lm", LexAction)
+ cxx_file.add_emitter(".lm", lexEmitter)
+
+ # C++
+ cxx_file.add_action(".ll", LexAction)
+ cxx_file.add_emitter(".ll", lexEmitter)
+
+ env["LEX"] = env.Detect("flex") or "lex"
+ env["LEXFLAGS"] = SCons.Util.CLVar("")
+ env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET"
+
+def exists(env):
+ return env.Detect(["flex", "lex"])
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/link.py b/tools/scons/scons-local-1.2.0/SCons/Tool/link.py
new file mode 100644
index 000000000..d02bb25fb
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/link.py
@@ -0,0 +1,112 @@
+"""SCons.Tool.link
+
+Tool-specific initialization for the generic Posix linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/link.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+import SCons.Warnings
+
+from SCons.Tool.FortranCommon import isfortran
+
+cplusplus = __import__('c++', globals(), locals(), [])
+
+issued_mixed_link_warning = False
+
+def smart_link(source, target, env, for_signature):
+ has_cplusplus = cplusplus.iscplusplus(source)
+ has_fortran = isfortran(env, source)
+ if has_cplusplus and has_fortran:
+ global issued_mixed_link_warning
+ if not issued_mixed_link_warning:
+ msg = "Using $CXX to link Fortran and C++ code together.\n\t" + \
+ "This may generate a buggy executable if the %s\n\t" + \
+ "compiler does not know how to deal with Fortran runtimes."
+ SCons.Warnings.warn(SCons.Warnings.FortranCxxMixWarning,
+ msg % repr(env.subst('$CXX')))
+ issued_mixed_link_warning = True
+ return '$CXX'
+ elif has_fortran:
+ return '$FORTRAN'
+ elif has_cplusplus:
+ return '$CXX'
+ return '$CC'
+
+def shlib_emitter(target, source, env):
+ for tgt in target:
+ tgt.attributes.shared = 1
+ return (target, source)
+
+def generate(env):
+ """Add Builders and construction variables for gnulink to an Environment."""
+ SCons.Tool.createSharedLibBuilder(env)
+ SCons.Tool.createProgBuilder(env)
+
+ env['SHLINK'] = '$LINK'
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
+ env['SHLINKCOM'] = '$SHLINK -o $TARGET $SHLINKFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
+ # don't set up the emitter, cause AppendUnique will generate a list
+ # starting with None :-(
+ env.Append(SHLIBEMITTER = [shlib_emitter])
+ env['SMARTLINK'] = smart_link
+ env['LINK'] = "$SMARTLINK"
+ env['LINKFLAGS'] = SCons.Util.CLVar('')
+ env['LINKCOM'] = '$LINK -o $TARGET $LINKFLAGS $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
+ env['LIBDIRPREFIX']='-L'
+ env['LIBDIRSUFFIX']=''
+ env['_LIBFLAGS']='${_stripixes(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}'
+ env['LIBLINKPREFIX']='-l'
+ env['LIBLINKSUFFIX']=''
+
+ if env['PLATFORM'] == 'hpux':
+ env['SHLIBSUFFIX'] = '.sl'
+ elif env['PLATFORM'] == 'aix':
+ env['SHLIBSUFFIX'] = '.a'
+
+ # For most platforms, a loadable module is the same as a shared
+ # library. Platforms which are different can override these, but
+ # setting them the same means that LoadableModule works everywhere.
+ SCons.Tool.createLoadableModuleBuilder(env)
+ env['LDMODULE'] = '$SHLINK'
+ env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
+ env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
+ env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
+ env['LDMODULECOM'] = '$SHLINKCOM'
+
+
+
+def exists(env):
+ # This module isn't really a Tool on its own, it's common logic for
+ # other linkers.
+ return None
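smart_link() above picks the link front end from what went into the objects; a sketch (source names are placeholders):

    # SConstruct -- sketch only, on a platform using this generic link tool.
    env = Environment()
    env.Program('app', ['main.cpp', 'util.c'])
    # Because a C++ source is present, $SMARTLINK resolves $LINK to $CXX;
    # with only C sources it resolves to $CC, and Fortran sources select
    # $FORTRAN per smart_link() above.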
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/linkloc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/linkloc.py
new file mode 100644
index 000000000..b0550c6e3
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/linkloc.py
@@ -0,0 +1,105 @@
+"""SCons.Tool.linkloc
+
+Tool specification for the LinkLoc linker for the Phar Lap ETS embedded
+operating system.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/linkloc.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import re
+
+import SCons.Action
+import SCons.Defaults
+import SCons.Errors
+import SCons.Tool
+import SCons.Util
+
+from SCons.Tool.msvc import get_msvc_paths
+from SCons.Tool.PharLapCommon import addPharLapPaths
+
+_re_linker_command = re.compile(r'(\s)@\s*([^\s]+)')
+
+def repl_linker_command(m):
+ # Replaces any linker command file directives (e.g. "@foo.lnk") with
+ # the actual contents of the file.
+ try:
+ f=open(m.group(2), "r")
+ return m.group(1) + f.read()
+ except IOError:
+ # the linker should return an error if it can't
+ # find the linker command file so we will remain quiet.
+ # However, we will replace the @ with a # so we will not continue
+ # to find it with recursive substitution
+ return m.group(1) + '#' + m.group(2)
+
+class LinklocGenerator:
+ def __init__(self, cmdline):
+ self.cmdline = cmdline
+
+ def __call__(self, env, target, source, for_signature):
+ if for_signature:
+ # Expand the contents of any linker command files recursively
+ subs = 1
+ strsub = env.subst(self.cmdline, target=target, source=source)
+ while subs:
+ strsub, subs = _re_linker_command.subn(repl_linker_command, strsub)
+ return strsub
+ else:
+ return "${TEMPFILE('" + self.cmdline + "')}"
+
+def generate(env):
+ """Add Builders and construction variables for ar to an Environment."""
+ SCons.Tool.createSharedLibBuilder(env)
+ SCons.Tool.createProgBuilder(env)
+
+ env['SUBST_CMD_FILE'] = LinklocGenerator
+ env['SHLINK'] = '$LINK'
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS')
+ env['SHLINKCOM'] = '${SUBST_CMD_FILE("$SHLINK $SHLINKFLAGS $( $_LIBDIRFLAGS $) $_LIBFLAGS -dll $TARGET $SOURCES")}'
+ env['SHLIBEMITTER']= None
+ env['LINK'] = "linkloc"
+ env['LINKFLAGS'] = SCons.Util.CLVar('')
+ env['LINKCOM'] = '${SUBST_CMD_FILE("$LINK $LINKFLAGS $( $_LIBDIRFLAGS $) $_LIBFLAGS -exe $TARGET $SOURCES")}'
+ env['LIBDIRPREFIX']='-libpath '
+ env['LIBDIRSUFFIX']=''
+ env['LIBLINKPREFIX']='-lib '
+ env['LIBLINKSUFFIX']='$LIBSUFFIX'
+
+ msvs_version = env.get('MSVS_VERSION')
+ include_path, lib_path, exe_path = get_msvc_paths(env, version = msvs_version)
+ env['ENV']['LIB'] = lib_path
+ env.PrependENVPath('PATH', exe_path)
+
+ addPharLapPaths(env)
+
+def exists(env):
+ return env.Detect('linkloc')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/m4.py b/tools/scons/scons-local-1.2.0/SCons/Tool/m4.py
new file mode 100644
index 000000000..0d81d7146
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/m4.py
@@ -0,0 +1,57 @@
+"""SCons.Tool.m4
+
+Tool-specific initialization for m4.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/m4.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Builder
+import SCons.Util
+
+def generate(env):
+ """Add Builders and construction variables for m4 to an Environment."""
+ M4Action = SCons.Action.Action('$M4COM', '$M4COMSTR')
+ bld = SCons.Builder.Builder(action = M4Action, src_suffix = '.m4')
+
+ env['BUILDERS']['M4'] = bld
+
+ # .m4 files might include other files, and it would be pretty hard
+ # to write a scanner for it, so let's just cd to the dir of the m4
+ # file and run from there.
+ # The src_suffix setup is like so: file.c.m4 -> file.c,
+ # file.cpp.m4 -> file.cpp etc.
+ env['M4'] = 'm4'
+ env['M4FLAGS'] = SCons.Util.CLVar('-E')
+ env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}'
+
+def exists(env):
+ return env.Detect('m4')
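A sketch matching the file.c.m4 -> file.c convention described in the comment above (file names are placeholders):

    # SConstruct -- sketch only.
    env = Environment(tools=['m4'])
    env.M4(target='parser.c', source='parser.c.m4')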
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/masm.py b/tools/scons/scons-local-1.2.0/SCons/Tool/masm.py
new file mode 100644
index 000000000..850890087
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/masm.py
@@ -0,0 +1,71 @@
+"""SCons.Tool.masm
+
+Tool-specific initialization for the Microsoft Assembler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/masm.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+ASSuffixes = ['.s', '.asm', '.ASM']
+ASPPSuffixes = ['.spp', '.SPP', '.sx']
+if SCons.Util.case_sensitive_suffixes('.s', '.S'):
+ ASPPSuffixes.extend(['.S'])
+else:
+ ASSuffixes.extend(['.S'])
+
+def generate(env):
+ """Add Builders and construction variables for masm to an Environment."""
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ for suffix in ASSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.ASAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ASAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+ shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
+
+ for suffix in ASPPSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ASPPAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+ shared_obj.add_emitter(suffix, SCons.Defaults.SharedObjectEmitter)
+
+ env['AS'] = 'ml'
+ env['ASFLAGS'] = SCons.Util.CLVar('/nologo')
+ env['ASPPFLAGS'] = '$ASFLAGS'
+ env['ASCOM'] = '$AS $ASFLAGS /c /Fo$TARGET $SOURCES'
+ env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c /Fo$TARGET $SOURCES'
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
+
+def exists(env):
+ return env.Detect('ml')
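A sketch of assembling with ml via the Object builder set up above (Windows with MASM on PATH is assumed; file names are placeholders):

    # SConstruct -- sketch only.
    env = Environment(tools=['masm'])
    env.Object(target='lowlevel', source='lowlevel.asm')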
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/midl.py b/tools/scons/scons-local-1.2.0/SCons/Tool/midl.py
new file mode 100644
index 000000000..df1bf9a5d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/midl.py
@@ -0,0 +1,90 @@
+"""SCons.Tool.midl
+
+Tool-specific initialization for midl (Microsoft IDL compiler).
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/midl.py 3842 2008/12/20 22:59:52 scons"
+
+import string
+
+import SCons.Action
+import SCons.Builder
+import SCons.Defaults
+import SCons.Scanner.IDL
+import SCons.Util
+
+def midl_emitter(target, source, env):
+ """Produces a list of outputs from the MIDL compiler"""
+ base, ext = SCons.Util.splitext(str(target[0]))
+ tlb = target[0]
+ incl = base + '.h'
+ interface = base + '_i.c'
+ t = [tlb, incl, interface]
+
+ midlcom = env['MIDLCOM']
+
+ if string.find(midlcom, '/proxy') != -1:
+ proxy = base + '_p.c'
+ t.append(proxy)
+ if string.find(midlcom, '/dlldata') != -1:
+ dlldata = base + '_data.c'
+ t.append(dlldata)
+
+ return (t,source)
+
+idl_scanner = SCons.Scanner.IDL.IDLScan()
+
+midl_action = SCons.Action.Action('$MIDLCOM', '$MIDLCOMSTR')
+
+midl_builder = SCons.Builder.Builder(action = midl_action,
+ src_suffix = '.idl',
+ suffix='.tlb',
+ emitter = midl_emitter,
+ source_scanner = idl_scanner)
+
+def generate(env):
+ """Add Builders and construction variables for midl to an Environment."""
+
+ env['MIDL'] = 'MIDL.EXE'
+ env['MIDLFLAGS'] = SCons.Util.CLVar('/nologo')
+ env['MIDLCOM'] = '$MIDL $MIDLFLAGS /tlb ${TARGETS[0]} /h ${TARGETS[1]} /iid ${TARGETS[2]} /proxy ${TARGETS[3]} /dlldata ${TARGETS[4]} $SOURCE 2> NUL'
+ env['BUILDERS']['TypeLibrary'] = midl_builder
+
+def exists(env):
+ if not env['PLATFORM'] in ('win32', 'cygwin'):
+ return 0
+
+ import SCons.Tool.msvs
+ if SCons.Tool.msvs.is_msvs_installed():
+ # there's at least one version of MSVS installed, which comes with midl:
+ return 1
+ else:
+ return env.Detect('midl')
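The emitter above expands a single .idl into the full set of MIDL outputs; a sketch (Windows only; file names are placeholders):

    # SConstruct -- sketch only.
    env = Environment(tools=['default', 'midl'])
    env.TypeLibrary(target='interfaces.tlb', source='interfaces.idl')
    # With the default $MIDLCOM (which contains /proxy and /dlldata),
    # midl_emitter() also declares interfaces.h, interfaces_i.c,
    # interfaces_p.c and interfaces_data.c as targets.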
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/mingw.py b/tools/scons/scons-local-1.2.0/SCons/Tool/mingw.py
new file mode 100644
index 000000000..faec2e9ed
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/mingw.py
@@ -0,0 +1,151 @@
+"""SCons.Tool.gcc
+
+Tool-specific initialization for MinGW (http://www.mingw.org/)
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/mingw.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+import string
+
+import SCons.Action
+import SCons.Builder
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+# This is what we search for to find mingw:
+key_program = 'mingw32-gcc'
+
+def find(env):
+ # First search in the SCons path and then the OS path:
+ return env.WhereIs(key_program) or SCons.Util.WhereIs(key_program)
+
+def shlib_generator(target, source, env, for_signature):
+ cmd = SCons.Util.CLVar(['$SHLINK', '$SHLINKFLAGS'])
+
+ dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
+ if dll: cmd.extend(['-o', dll])
+
+ cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS'])
+
+ implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
+ if implib: cmd.append('-Wl,--out-implib,'+implib.get_string(for_signature))
+
+ def_target = env.FindIxes(target, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX')
+ if def_target: cmd.append('-Wl,--output-def,'+def_target.get_string(for_signature))
+
+ return [cmd]
+
+def shlib_emitter(target, source, env):
+ dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
+ no_import_lib = env.get('no_import_lib', 0)
+
+ if not dll:
+ raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX")
+
+ if not no_import_lib and \
+ not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'):
+
+ # Append an import library to the list of targets.
+ target.append(env.ReplaceIxes(dll,
+ 'SHLIBPREFIX', 'SHLIBSUFFIX',
+ 'LIBPREFIX', 'LIBSUFFIX'))
+
+ # Append a def file target if there isn't already a def file target
+ # or a def file source. There is no option to disable def file
+ # target emitting, because I can't figure out why someone would ever
+ # want to turn it off.
+ def_source = env.FindIxes(source, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX')
+ def_target = env.FindIxes(target, 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX')
+ if not def_source and not def_target:
+ target.append(env.ReplaceIxes(dll,
+ 'SHLIBPREFIX', 'SHLIBSUFFIX',
+ 'WINDOWSDEFPREFIX', 'WINDOWSDEFSUFFIX'))
+
+ return (target, source)
+
+
+shlib_action = SCons.Action.Action(shlib_generator, generator=1)
+
+res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR')
+
+res_builder = SCons.Builder.Builder(action=res_action, suffix='.o',
+ source_scanner=SCons.Tool.SourceFileScanner)
+SCons.Tool.SourceFileScanner.add_scanner('.rc', SCons.Defaults.CScan)
+
+def generate(env):
+ mingw = find(env)
+ if mingw:
+ dir = os.path.dirname(mingw)
+ env.PrependENVPath('PATH', dir )
+
+
+ # Most of mingw is the same as gcc and friends...
+ gnu_tools = ['gcc', 'g++', 'gnulink', 'ar', 'gas', 'm4']
+ for tool in gnu_tools:
+ SCons.Tool.Tool(tool)(env)
+
+ #... but a few things differ:
+ env['CC'] = 'gcc'
+ env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
+ env['CXX'] = 'g++'
+ env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
+ env['SHLINKCOM'] = shlib_action
+ env['LDMODULECOM'] = shlib_action
+ env.Append(SHLIBEMITTER = [shlib_emitter])
+ env['AS'] = 'as'
+
+ env['WIN32DEFPREFIX'] = ''
+ env['WIN32DEFSUFFIX'] = '.def'
+ env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}'
+ env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}'
+
+ env['SHOBJSUFFIX'] = '.o'
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
+
+ env['RC'] = 'windres'
+ env['RCFLAGS'] = SCons.Util.CLVar('')
+ env['RCINCFLAGS'] = '$( ${_concat(RCINCPREFIX, CPPPATH, RCINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
+ env['RCINCPREFIX'] = '--include-dir '
+ env['RCINCSUFFIX'] = ''
+ env['RCCOM'] = '$RC $_CPPDEFFLAGS $RCINCFLAGS ${RCINCPREFIX} ${SOURCE.dir} $RCFLAGS -i $SOURCE -o $TARGET'
+ env['BUILDERS']['RES'] = res_builder
+
+ # Some setting from the platform also have to be overridden:
+ env['OBJSUFFIX'] = '.o'
+ env['LIBPREFIX'] = 'lib'
+ env['LIBSUFFIX'] = '.a'
+
+def exists(env):
+ return find(env)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/mslib.py b/tools/scons/scons-local-1.2.0/SCons/Tool/mslib.py
new file mode 100644
index 000000000..340f9927d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/mslib.py
@@ -0,0 +1,76 @@
+"""SCons.Tool.mslib
+
+Tool-specific initialization for lib (the Microsoft library archiver).
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/mslib.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Errors
+import SCons.Tool
+import SCons.Tool.msvs
+import SCons.Tool.msvc
+import SCons.Util
+
+def generate(env):
+ """Add Builders and construction variables for lib to an Environment."""
+ SCons.Tool.createStaticLibBuilder(env)
+
+ try:
+ version = SCons.Tool.msvs.get_default_visualstudio_version(env)
+
+ if env.has_key('MSVS_IGNORE_IDE_PATHS') and env['MSVS_IGNORE_IDE_PATHS']:
+ include_path, lib_path, exe_path = SCons.Tool.msvc.get_msvc_default_paths(env,version)
+ else:
+ include_path, lib_path, exe_path = SCons.Tool.msvc.get_msvc_paths(env,version)
+
+ # since other tools can set this, we just make sure that the
+ # relevant stuff from MSVS is in there somewhere.
+ env.PrependENVPath('PATH', exe_path)
+ except (SCons.Util.RegError, SCons.Errors.InternalError):
+ pass
+
+ env['AR'] = 'lib'
+ env['ARFLAGS'] = SCons.Util.CLVar('/nologo')
+ env['ARCOM'] = "${TEMPFILE('$AR $ARFLAGS /OUT:$TARGET $SOURCES')}"
+ env['LIBPREFIX'] = ''
+ env['LIBSUFFIX'] = '.lib'
+
+def exists(env):
+    # Default v so the check below still works if the registry probe raises.
+    v = None
+    try:
+        v = SCons.Tool.msvs.get_visualstudio_versions()
+    except (SCons.Util.RegError, SCons.Errors.InternalError):
+        pass
+
+ if not v:
+ return env.Detect('lib')
+ else:
+ # there's at least one version of MSVS installed.
+ return 1
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/mslink.py b/tools/scons/scons-local-1.2.0/SCons/Tool/mslink.py
new file mode 100644
index 000000000..298ae7c64
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/mslink.py
@@ -0,0 +1,249 @@
+"""SCons.Tool.mslink
+
+Tool-specific initialization for the Microsoft linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/mslink.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+
+import SCons.Action
+import SCons.Defaults
+import SCons.Errors
+import SCons.Platform.win32
+import SCons.Tool
+import SCons.Tool.msvc
+import SCons.Tool.msvs
+import SCons.Util
+
+def pdbGenerator(env, target, source, for_signature):
+ try:
+ return ['/PDB:%s' % target[0].attributes.pdb, '/DEBUG']
+ except (AttributeError, IndexError):
+ return None
+
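+# Illustrative usage (an assumption, not taken from the upstream source):
+# setting env['PDB'] = 'foo.pdb' makes the emitters below record the .pdb
+# node on target[0].attributes.pdb, so $_PDB expands to '/PDB:foo.pdb /DEBUG'
+# on the link line; with PDB unset it expands to nothing and no debug
+# information is requested.
+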
+def windowsShlinkTargets(target, source, env, for_signature):
+ listCmd = []
+ dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
+ if dll: listCmd.append("/out:%s"%dll.get_string(for_signature))
+
+ implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
+ if implib: listCmd.append("/implib:%s"%implib.get_string(for_signature))
+
+ return listCmd
+
+def windowsShlinkSources(target, source, env, for_signature):
+ listCmd = []
+
+ deffile = env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX")
+ for src in source:
+ if src == deffile:
+ # Treat this source as a .def file.
+ listCmd.append("/def:%s" % src.get_string(for_signature))
+ else:
+ # Just treat it as a generic source file.
+ listCmd.append(src)
+ return listCmd
+
+def windowsLibEmitter(target, source, env):
+ SCons.Tool.msvc.validate_vars(env)
+
+ extratargets = []
+ extrasources = []
+
+ dll = env.FindIxes(target, "SHLIBPREFIX", "SHLIBSUFFIX")
+ no_import_lib = env.get('no_import_lib', 0)
+
+ if not dll:
+ raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX")
+
+ insert_def = env.subst("$WINDOWS_INSERT_DEF")
+ if not insert_def in ['', '0', 0] and \
+ not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"):
+
+ # append a def file to the list of sources
+ extrasources.append(
+ env.ReplaceIxes(dll,
+ "SHLIBPREFIX", "SHLIBSUFFIX",
+ "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"))
+
+ version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
+ if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0):
+ # MSVC 8 automatically generates .manifest files that must be installed
+ extratargets.append(
+ env.ReplaceIxes(dll,
+ "SHLIBPREFIX", "SHLIBSUFFIX",
+ "WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX"))
+
+ if env.has_key('PDB') and env['PDB']:
+ pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
+ extratargets.append(pdb)
+ target[0].attributes.pdb = pdb
+
+ if not no_import_lib and \
+ not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"):
+ # Append an import library to the list of targets.
+ extratargets.append(
+ env.ReplaceIxes(dll,
+ "SHLIBPREFIX", "SHLIBSUFFIX",
+ "LIBPREFIX", "LIBSUFFIX"))
+        # A .exp file is also created if there are exports from the DLL.
+ extratargets.append(
+ env.ReplaceIxes(dll,
+ "SHLIBPREFIX", "SHLIBSUFFIX",
+ "WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX"))
+
+ return (target+extratargets, source+extrasources)
+
+def prog_emitter(target, source, env):
+ SCons.Tool.msvc.validate_vars(env)
+
+ extratargets = []
+
+ exe = env.FindIxes(target, "PROGPREFIX", "PROGSUFFIX")
+ if not exe:
+ raise SCons.Errors.UserError, "An executable should have exactly one target with the suffix: %s" % env.subst("$PROGSUFFIX")
+
+ version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
+ if version_num >= 8.0 and env.get('WINDOWS_INSERT_MANIFEST', 0):
+ # MSVC 8 automatically generates .manifest files that have to be installed
+ extratargets.append(
+ env.ReplaceIxes(exe,
+ "PROGPREFIX", "PROGSUFFIX",
+ "WINDOWSPROGMANIFESTPREFIX", "WINDOWSPROGMANIFESTSUFFIX"))
+
+ if env.has_key('PDB') and env['PDB']:
+ pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
+ extratargets.append(pdb)
+ target[0].attributes.pdb = pdb
+
+ return (target+extratargets,source)
+
+def RegServerFunc(target, source, env):
+ if env.has_key('register') and env['register']:
+ ret = regServerAction([target[0]], [source[0]], env)
+ if ret:
+ raise SCons.Errors.UserError, "Unable to register %s" % target[0]
+ else:
+            print "Registered %s successfully" % target[0]
+ return ret
+ return 0
+
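+# Illustrative usage sketch (an assumption, not taken from the upstream
+# source): COM servers can request registration after linking, e.g.
+#
+#   env['register'] = 1   # run '$REGSVR /s' on the freshly linked DLL
+#   env.SharedLibrary('comserver', ['server.cpp', 'server.def'])
+#
+# With 'register' unset (the default), RegServerFunc above is a no-op and
+# compositeLinkAction reduces to the plain shlibLinkAction link step.
+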
+regServerAction = SCons.Action.Action("$REGSVRCOM", "$REGSVRCOMSTR")
+regServerCheck = SCons.Action.Action(RegServerFunc, None)
+shlibLinkAction = SCons.Action.Action('${TEMPFILE("$SHLINK $SHLINKFLAGS $_SHLINK_TARGETS $( $_LIBDIRFLAGS $) $_LIBFLAGS $_PDB $_SHLINK_SOURCES")}')
+compositeLinkAction = shlibLinkAction + regServerCheck
+
+def generate(env):
+    """Add Builders and construction variables for the Microsoft linker to an Environment."""
+ SCons.Tool.createSharedLibBuilder(env)
+ SCons.Tool.createProgBuilder(env)
+
+ env['SHLINK'] = '$LINK'
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS /dll')
+ env['_SHLINK_TARGETS'] = windowsShlinkTargets
+ env['_SHLINK_SOURCES'] = windowsShlinkSources
+ env['SHLINKCOM'] = compositeLinkAction
+ env.Append(SHLIBEMITTER = [windowsLibEmitter])
+ env['LINK'] = 'link'
+ env['LINKFLAGS'] = SCons.Util.CLVar('/nologo')
+ env['_PDB'] = pdbGenerator
+ env['LINKCOM'] = '${TEMPFILE("$LINK $LINKFLAGS /OUT:$TARGET.windows $( $_LIBDIRFLAGS $) $_LIBFLAGS $_PDB $SOURCES.windows")}'
+ env.Append(PROGEMITTER = [prog_emitter])
+ env['LIBDIRPREFIX']='/LIBPATH:'
+ env['LIBDIRSUFFIX']=''
+ env['LIBLINKPREFIX']=''
+ env['LIBLINKSUFFIX']='$LIBSUFFIX'
+
+ env['WIN32DEFPREFIX'] = ''
+ env['WIN32DEFSUFFIX'] = '.def'
+ env['WIN32_INSERT_DEF'] = 0
+ env['WINDOWSDEFPREFIX'] = '${WIN32DEFPREFIX}'
+ env['WINDOWSDEFSUFFIX'] = '${WIN32DEFSUFFIX}'
+ env['WINDOWS_INSERT_DEF'] = '${WIN32_INSERT_DEF}'
+
+ env['WIN32EXPPREFIX'] = ''
+ env['WIN32EXPSUFFIX'] = '.exp'
+ env['WINDOWSEXPPREFIX'] = '${WIN32EXPPREFIX}'
+ env['WINDOWSEXPSUFFIX'] = '${WIN32EXPSUFFIX}'
+
+ env['WINDOWSSHLIBMANIFESTPREFIX'] = ''
+ env['WINDOWSSHLIBMANIFESTSUFFIX'] = '${SHLIBSUFFIX}.manifest'
+ env['WINDOWSPROGMANIFESTPREFIX'] = ''
+ env['WINDOWSPROGMANIFESTSUFFIX'] = '${PROGSUFFIX}.manifest'
+
+ env['REGSVRACTION'] = regServerCheck
+ env['REGSVR'] = os.path.join(SCons.Platform.win32.get_system_root(),'System32','regsvr32')
+ env['REGSVRFLAGS'] = '/s '
+ env['REGSVRCOM'] = '$REGSVR $REGSVRFLAGS ${TARGET.windows}'
+
+ try:
+ version = SCons.Tool.msvs.get_default_visualstudio_version(env)
+
+ if env.has_key('MSVS_IGNORE_IDE_PATHS') and env['MSVS_IGNORE_IDE_PATHS']:
+ include_path, lib_path, exe_path = SCons.Tool.msvc.get_msvc_default_paths(env,version)
+ else:
+ include_path, lib_path, exe_path = SCons.Tool.msvc.get_msvc_paths(env,version)
+
+ # since other tools can set these, we just make sure that the
+ # relevant stuff from MSVS is in there somewhere.
+ env.PrependENVPath('INCLUDE', include_path)
+ env.PrependENVPath('LIB', lib_path)
+ env.PrependENVPath('PATH', exe_path)
+ except (SCons.Util.RegError, SCons.Errors.InternalError):
+ pass
+
+ # For most platforms, a loadable module is the same as a shared
+ # library. Platforms which are different can override these, but
+ # setting them the same means that LoadableModule works everywhere.
+ SCons.Tool.createLoadableModuleBuilder(env)
+ env['LDMODULE'] = '$SHLINK'
+ env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
+ env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
+ env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
+ # We can't use '$SHLINKCOM' here because that will stringify the
+ # action list on expansion, and will then try to execute expanded
+ # strings, with the upshot that it would try to execute RegServerFunc
+ # as a command.
+ env['LDMODULECOM'] = compositeLinkAction
+
+def exists(env):
+ platform = env.get('PLATFORM', '')
+ if SCons.Tool.msvs.is_msvs_installed():
+ # there's at least one version of MSVS installed.
+ return 1
+ elif platform in ('win32', 'cygwin'):
+ # Only explicitly search for a 'link' executable on Windows
+ # systems. Some other systems (e.g. Ubuntu Linux) have an
+ # executable named 'link' and we don't want that to make SCons
+ # think Visual Studio is installed.
+ return env.Detect('link')
+ return None
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/msvc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/msvc.py
new file mode 100644
index 000000000..5b7874a20
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/msvc.py
@@ -0,0 +1,766 @@
+"""engine.SCons.Tool.msvc
+
+Tool-specific initialization for Microsoft Visual C/C++.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/msvc.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import re
+import string
+
+import SCons.Action
+import SCons.Builder
+import SCons.Errors
+import SCons.Platform.win32
+import SCons.Tool
+import SCons.Tool.msvs
+import SCons.Util
+import SCons.Warnings
+import SCons.Scanner.RC
+
+CSuffixes = ['.c', '.C']
+CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++']
+
+def _parse_msvc7_overrides(version,platform):
+ """ Parse any overridden defaults for MSVS directory locations
+ in MSVS .NET. """
+
+ # First, we get the shell folder for this user:
+ if not SCons.Util.can_read_reg:
+ raise SCons.Errors.InternalError, "No Windows registry module was found"
+
+ comps = ""
+ try:
+ (comps, t) = SCons.Util.RegGetValue(SCons.Util.HKEY_CURRENT_USER,
+ r'Software\Microsoft\Windows\CurrentVersion' +\
+ r'\Explorer\Shell Folders\Local AppData')
+ except SCons.Util.RegError:
+ raise SCons.Errors.InternalError, \
+ "The Local AppData directory was not found in the registry."
+
+ comps = comps + '\\Microsoft\\VisualStudio\\' + version + '\\VCComponents.dat'
+ dirs = {}
+
+ if os.path.exists(comps):
+ # now we parse the directories from this file, if it exists.
+ # We only look for entries after:
+ # [VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories],
+ # since this file could contain a number of things...
+ lines = None
+ try:
+ import codecs
+ except ImportError:
+ pass
+ else:
+ try:
+ f = codecs.open(comps, 'r', 'utf16')
+ encoder = codecs.getencoder('ascii')
+ lines = map(lambda l, e=encoder: e(l)[0], f.readlines())
+ except (LookupError, UnicodeError):
+ lines = codecs.open(comps, 'r', 'utf8').readlines()
+ if lines is None:
+ lines = open(comps, 'r').readlines()
+ if 'x86' == platform: platform = 'Win32'
+
+ found = 0
+ for line in lines:
+            line = line.strip()
+ if line.find(r'[VC\VC_OBJECTS_PLATFORM_INFO\%s\Directories]'%platform) >= 0:
+ found = 1
+ elif line == '' or line[:1] == '[':
+ found = 0
+ elif found == 1:
+ kv = line.split('=', 1)
+ if len(kv) == 2:
+ (key, val) = kv
+ key = key.replace(' Dirs','')
+ dirs[key.upper()] = val
+ f.close()
+ else:
+ # since the file didn't exist, we have only the defaults in
+ # the registry to work with.
+
+ if 'x86' == platform: platform = 'Win32'
+
+ try:
+ K = 'SOFTWARE\\Microsoft\\VisualStudio\\' + version
+ K = K + r'\VC\VC_OBJECTS_PLATFORM_INFO\%s\Directories'%platform
+ k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,K)
+ i = 0
+ while 1:
+ try:
+ (key,val,t) = SCons.Util.RegEnumValue(k,i)
+ key = key.replace(' Dirs','')
+ dirs[key.upper()] = val
+ i = i + 1
+ except SCons.Util.RegError:
+ break
+ except SCons.Util.RegError:
+ # if we got here, then we didn't find the registry entries:
+ raise SCons.Errors.InternalError, "Unable to find MSVC paths in the registry."
+ return dirs
+
+def _parse_msvc8_overrides(version,platform,suite):
+ """ Parse any overridden defaults for MSVC directory locations
+ in MSVC 2005. """
+
+ # In VS8 the user can change the location of the settings file that
+ # contains the include, lib and binary paths. Try to get the location
+ # from registry
+ if not SCons.Util.can_read_reg:
+ raise SCons.Errors.InternalError, "No Windows registry module was found"
+
+ # XXX This code assumes anything that isn't EXPRESS uses the default
+ # registry key string. Is this really true for all VS suites?
+ if suite == 'EXPRESS':
+ s = '\\VCExpress\\'
+ else:
+ s = '\\VisualStudio\\'
+
+ settings_path = ""
+ try:
+ (settings_path, t) = SCons.Util.RegGetValue(SCons.Util.HKEY_CURRENT_USER,
+ r'Software\Microsoft' + s + version +\
+ r'\Profile\AutoSaveFile')
+ settings_path = settings_path.upper()
+ except SCons.Util.RegError:
+ raise SCons.Errors.InternalError, \
+ "The VS8 settings file location was not found in the registry."
+
+ # Look for potential environment variables in the settings path
+ if settings_path.find('%VSSPV_VISUALSTUDIO_DIR%') >= 0:
+ # First replace a special variable named %vsspv_visualstudio_dir%
+        # that is not found in the OS's environment variables...
+ try:
+ (value, t) = SCons.Util.RegGetValue(SCons.Util.HKEY_CURRENT_USER,
+ r'Software\Microsoft' + s + version +\
+ r'\VisualStudioLocation')
+ settings_path = settings_path.replace('%VSSPV_VISUALSTUDIO_DIR%', value)
+ except SCons.Util.RegError:
+ raise SCons.Errors.InternalError, "The VS8 settings file location was not found in the registry."
+
+ if settings_path.find('%') >= 0:
+ # Collect global environment variables
+ env_vars = {}
+
+ # Read all the global environment variables of the current user
+ k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_CURRENT_USER, r'Environment')
+ i = 0
+ while 1:
+ try:
+ (key,val,t) = SCons.Util.RegEnumValue(k,i)
+ env_vars[key.upper()] = val.upper()
+ i = i + 1
+ except SCons.Util.RegError:
+ break
+
+ # And some more variables that are not found in the registry
+ env_vars['USERPROFILE'] = os.getenv('USERPROFILE')
+ env_vars['SystemDrive'] = os.getenv('SystemDrive')
+
+ found_var = 1
+ while found_var:
+ found_var = 0
+ for env_var in env_vars:
+ if settings_path.find(r'%' + env_var + r'%') >= 0:
+ settings_path = settings_path.replace(r'%' + env_var + r'%', env_vars[env_var])
+ found_var = 1
+
+ dirs = {}
+
+ if os.path.exists(settings_path):
+ # now we parse the directories from this file, if it exists.
+ import xml.dom.minidom
+ doc = xml.dom.minidom.parse(settings_path)
+ user_settings = doc.getElementsByTagName('UserSettings')[0]
+ tool_options = user_settings.getElementsByTagName('ToolsOptions')[0]
+ tool_options_categories = tool_options.getElementsByTagName('ToolsOptionsCategory')
+ environment_var_map = {
+ 'IncludeDirectories' : 'INCLUDE',
+ 'LibraryDirectories' : 'LIBRARY',
+ 'ExecutableDirectories' : 'PATH',
+ }
+ for category in tool_options_categories:
+ category_name = category.attributes.get('name')
+ if category_name is not None and category_name.value == 'Projects':
+ subcategories = category.getElementsByTagName('ToolsOptionsSubCategory')
+ for subcategory in subcategories:
+ subcategory_name = subcategory.attributes.get('name')
+ if subcategory_name is not None and subcategory_name.value == 'VCDirectories':
+ properties = subcategory.getElementsByTagName('PropertyValue')
+ for property in properties:
+ property_name = property.attributes.get('name')
+ if property_name is None:
+ continue
+ var_name = environment_var_map.get(property_name)
+ if var_name:
+ data = property.childNodes[0].data
+ value_list = string.split(data, '|')
+ if len(value_list) == 1:
+ dirs[var_name] = value_list[0]
+ else:
+ while value_list:
+ dest, value = value_list[:2]
+ del value_list[:2]
+ # ToDo: Support for destinations
+ # other than Win32
+ if dest == 'Win32':
+ dirs[var_name] = value
+ break
+ else:
+ # There are no default directories in the registry for VS8 Express :(
+ raise SCons.Errors.InternalError, "Unable to find MSVC paths in the registry."
+ return dirs
+
+def _get_msvc7_path(path, version, platform):
+ """
+ Get Visual Studio directories from version 7 (MSVS .NET)
+ (it has a different registry structure than versions before it)
+ """
+ # first, look for a customization of the default values in the
+ # registry: These are sometimes stored in the Local Settings area
+ # for Visual Studio, in a file, so we have to parse it.
+ dirs = _parse_msvc7_overrides(version,platform)
+
+ if dirs.has_key(path):
+ p = dirs[path]
+ else:
+ raise SCons.Errors.InternalError, \
+ "Unable to retrieve the %s path from MS VC++."%path
+
+ # collect some useful information for later expansions...
+ paths = SCons.Tool.msvs.get_msvs_install_dirs(version)
+
+ # expand the directory path variables that we support. If there
+ # is a variable we don't support, then replace that entry with
+ # "---Unknown Location VSInstallDir---" or something similar, to clue
+ # people in that we didn't find something, and so env expansion doesn't
+ # do weird things with the $(xxx)'s
+    s = re.compile(r'\$\(([a-zA-Z0-9_]+?)\)')
+
+ def repl(match, paths=paths):
+ key = string.upper(match.group(1))
+ if paths.has_key(key):
+ return paths[key]
+ else:
+ # Now look in the global environment variables
+ envresult = os.getenv(key)
+ if not envresult is None:
+ return envresult + '\\'
+ else:
+ return '---Unknown Location %s---' % match.group()
+
+ rv = []
+ for entry in p.split(os.pathsep):
+ entry = s.sub(repl,entry).rstrip('\n\r')
+ rv.append(entry)
+
+ return string.join(rv,os.pathsep)
+
+def _get_msvc8_path(path, version, platform, suite):
+ """
+ Get Visual Studio directories from version 8 (MSVS 2005)
+ (it has a different registry structure than versions before it)
+ """
+ # first, look for a customization of the default values in the
+ # registry: These are sometimes stored in the Local Settings area
+ # for Visual Studio, in a file, so we have to parse it.
+ dirs = _parse_msvc8_overrides(version, platform, suite)
+
+ if dirs.has_key(path):
+ p = dirs[path]
+ else:
+ raise SCons.Errors.InternalError, \
+ "Unable to retrieve the %s path from MS VC++."%path
+
+ # collect some useful information for later expansions...
+ paths = SCons.Tool.msvs.get_msvs_install_dirs(version, suite)
+
+ # expand the directory path variables that we support. If there
+ # is a variable we don't support, then replace that entry with
+ # "---Unknown Location VSInstallDir---" or something similar, to clue
+ # people in that we didn't find something, and so env expansion doesn't
+ # do weird things with the $(xxx)'s
+    s = re.compile(r'\$\(([a-zA-Z0-9_]+?)\)')
+
+ def repl(match, paths=paths):
+ key = string.upper(match.group(1))
+ if paths.has_key(key):
+ return paths[key]
+ else:
+ return '---Unknown Location %s---' % match.group()
+
+ rv = []
+ for entry in p.split(os.pathsep):
+ entry = s.sub(repl,entry).rstrip('\n\r')
+ rv.append(entry)
+
+ return string.join(rv,os.pathsep)
+
+def get_msvc_path(env, path, version):
+ """
+ Get a list of visualstudio directories (include, lib or path).
+ Return a string delimited by the os.pathsep separator (';'). An
+ exception will be raised if unable to access the registry or
+ appropriate registry keys not found.
+ """
+
+ if not SCons.Util.can_read_reg:
+ raise SCons.Errors.InternalError, "No Windows registry module was found"
+
+ # normalize the case for comparisons (since the registry is case
+ # insensitive)
+ path = string.upper(path)
+
+ if path=='LIB':
+ path= 'LIBRARY'
+
+ version_num, suite = SCons.Tool.msvs.msvs_parse_version(version)
+ if version_num >= 8.0:
+ platform = env.get('MSVS8_PLATFORM', 'x86')
+ suite = SCons.Tool.msvs.get_default_visualstudio8_suite(env)
+ else:
+ platform = 'x86'
+
+ if version_num >= 8.0:
+ return _get_msvc8_path(path, str(version_num), platform, suite)
+ elif version_num >= 7.0:
+ return _get_msvc7_path(path, str(version_num), platform)
+
+ path = string.upper(path + ' Dirs')
+ K = ('Software\\Microsoft\\Devstudio\\%s\\' +
+ 'Build System\\Components\\Platforms\\Win32 (x86)\\Directories') % \
+ (version)
+ for base in (SCons.Util.HKEY_CURRENT_USER,
+ SCons.Util.HKEY_LOCAL_MACHINE):
+ try:
+ k = SCons.Util.RegOpenKeyEx(base,K)
+ i = 0
+ while 1:
+ try:
+ (p,v,t) = SCons.Util.RegEnumValue(k,i)
+ if string.upper(p) == path:
+ return v
+ i = i + 1
+ except SCons.Util.RegError:
+ break
+ except SCons.Util.RegError:
+ pass
+
+ # if we got here, then we didn't find the registry entries:
+ raise SCons.Errors.InternalError, "The %s path was not found in the registry."%path
+
+def _get_msvc6_default_paths(version, use_mfc_dirs):
+ """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values of those
+ three environment variables that should be set in order to execute
+ the MSVC 6.0 tools properly, if the information wasn't available
+ from the registry."""
+ MVSdir = None
+ paths = {}
+ exe_path = ''
+ lib_path = ''
+ include_path = ''
+ try:
+ paths = SCons.Tool.msvs.get_msvs_install_dirs(version)
+ MVSdir = paths['VSINSTALLDIR']
+ except (SCons.Util.RegError, SCons.Errors.InternalError, KeyError):
+ if os.environ.has_key('MSDEVDIR'):
+ MVSdir = os.path.normpath(os.path.join(os.environ['MSDEVDIR'],'..','..'))
+ else:
+ MVSdir = r'C:\Program Files\Microsoft Visual Studio'
+ if MVSdir:
+ if SCons.Util.can_read_reg and paths.has_key('VCINSTALLDIR'):
+ MVSVCdir = paths['VCINSTALLDIR']
+ else:
+ MVSVCdir = os.path.join(MVSdir,'VC98')
+
+ MVSCommondir = r'%s\Common' % MVSdir
+ if use_mfc_dirs:
+ mfc_include_ = r'%s\ATL\include;%s\MFC\include;' % (MVSVCdir, MVSVCdir)
+ mfc_lib_ = r'%s\MFC\lib;' % MVSVCdir
+ else:
+ mfc_include_ = ''
+ mfc_lib_ = ''
+ include_path = r'%s%s\include' % (mfc_include_, MVSVCdir)
+ lib_path = r'%s%s\lib' % (mfc_lib_, MVSVCdir)
+
+ if os.environ.has_key('OS') and os.environ['OS'] == "Windows_NT":
+ osdir = 'WINNT'
+ else:
+ osdir = 'WIN95'
+
+ exe_path = r'%s\tools\%s;%s\MSDev98\bin;%s\tools;%s\bin' % (MVSCommondir, osdir, MVSCommondir, MVSCommondir, MVSVCdir)
+ return (include_path, lib_path, exe_path)
+
+def _get_msvc7_default_paths(env, version, use_mfc_dirs):
+ """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values of those
+ three environment variables that should be set in order to execute
+ the MSVC .NET tools properly, if the information wasn't available
+ from the registry."""
+
+ MVSdir = None
+ paths = {}
+ exe_path = ''
+ lib_path = ''
+ include_path = ''
+ try:
+ paths = SCons.Tool.msvs.get_msvs_install_dirs(version)
+ MVSdir = paths['VSINSTALLDIR']
+ except (KeyError, SCons.Util.RegError, SCons.Errors.InternalError):
+ if os.environ.has_key('VSCOMNTOOLS'):
+ MVSdir = os.path.normpath(os.path.join(os.environ['VSCOMNTOOLS'],'..','..'))
+ else:
+ # last resort -- default install location
+ MVSdir = r'C:\Program Files\Microsoft Visual Studio .NET'
+
+ if MVSdir:
+ if SCons.Util.can_read_reg and paths.has_key('VCINSTALLDIR'):
+ MVSVCdir = paths['VCINSTALLDIR']
+ else:
+ MVSVCdir = os.path.join(MVSdir,'Vc7')
+
+ MVSCommondir = r'%s\Common7' % MVSdir
+ if use_mfc_dirs:
+ mfc_include_ = r'%s\atlmfc\include;' % MVSVCdir
+ mfc_lib_ = r'%s\atlmfc\lib;' % MVSVCdir
+ else:
+ mfc_include_ = ''
+ mfc_lib_ = ''
+ include_path = r'%s%s\include;%s\PlatformSDK\include' % (mfc_include_, MVSVCdir, MVSVCdir)
+ lib_path = r'%s%s\lib;%s\PlatformSDK\lib' % (mfc_lib_, MVSVCdir, MVSVCdir)
+ exe_path = r'%s\IDE;%s\bin;%s\Tools;%s\Tools\bin' % (MVSCommondir,MVSVCdir, MVSCommondir, MVSCommondir )
+
+ if SCons.Util.can_read_reg and paths.has_key('FRAMEWORKSDKDIR'):
+ include_path = include_path + r';%s\include'%paths['FRAMEWORKSDKDIR']
+ lib_path = lib_path + r';%s\lib'%paths['FRAMEWORKSDKDIR']
+ exe_path = exe_path + r';%s\bin'%paths['FRAMEWORKSDKDIR']
+
+ if SCons.Util.can_read_reg and paths.has_key('FRAMEWORKDIR') and paths.has_key('FRAMEWORKVERSION'):
+ exe_path = exe_path + r';%s\%s'%(paths['FRAMEWORKDIR'],paths['FRAMEWORKVERSION'])
+
+ return (include_path, lib_path, exe_path)
+
+def _get_msvc8_default_paths(env, version, suite, use_mfc_dirs):
+ """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values of those
+ three environment variables that should be set in order to execute
+ the MSVC 8 tools properly, if the information wasn't available
+ from the registry."""
+
+ MVSdir = None
+ paths = {}
+ exe_paths = []
+ lib_paths = []
+ include_paths = []
+ try:
+ paths = SCons.Tool.msvs.get_msvs_install_dirs(version, suite)
+ MVSdir = paths['VSINSTALLDIR']
+ except (KeyError, SCons.Util.RegError, SCons.Errors.InternalError):
+ if os.environ.has_key('VSCOMNTOOLS'):
+ MVSdir = os.path.normpath(os.path.join(os.environ['VSCOMNTOOLS'],'..','..'))
+ else:
+ # last resort -- default install location
+ MVSdir = os.getenv('ProgramFiles') + r'\Microsoft Visual Studio 8'
+
+ if MVSdir:
+ if SCons.Util.can_read_reg and paths.has_key('VCINSTALLDIR'):
+ MVSVCdir = paths['VCINSTALLDIR']
+ else:
+ MVSVCdir = os.path.join(MVSdir,'VC')
+
+ MVSCommondir = os.path.join(MVSdir, 'Common7')
+ include_paths.append( os.path.join(MVSVCdir, 'include') )
+ lib_paths.append( os.path.join(MVSVCdir, 'lib') )
+ for base, subdir in [(MVSCommondir,'IDE'), (MVSVCdir,'bin'),
+ (MVSCommondir,'Tools'), (MVSCommondir,r'Tools\bin')]:
+ exe_paths.append( os.path.join( base, subdir) )
+
+ if paths.has_key('PLATFORMSDKDIR'):
+ PlatformSdkDir = paths['PLATFORMSDKDIR']
+ else:
+ PlatformSdkDir = os.path.join(MVSVCdir,'PlatformSDK')
+ platform_include_path = os.path.join( PlatformSdkDir, 'Include' )
+ include_paths.append( platform_include_path )
+ lib_paths.append( os.path.join( PlatformSdkDir, 'Lib' ) )
+ if use_mfc_dirs:
+ if paths.has_key('PLATFORMSDKDIR'):
+ include_paths.append( os.path.join( platform_include_path, 'mfc' ) )
+ include_paths.append( os.path.join( platform_include_path, 'atl' ) )
+ else:
+ atlmfc_path = os.path.join( MVSVCdir, 'atlmfc' )
+ include_paths.append( os.path.join( atlmfc_path, 'include' ) )
+ lib_paths.append( os.path.join( atlmfc_path, 'lib' ) )
+
+ if SCons.Util.can_read_reg and paths.has_key('FRAMEWORKSDKDIR'):
+ fwdir = paths['FRAMEWORKSDKDIR']
+ include_paths.append( os.path.join( fwdir, 'include' ) )
+ lib_paths.append( os.path.join( fwdir, 'lib' ) )
+ exe_paths.append( os.path.join( fwdir, 'bin' ) )
+
+ if SCons.Util.can_read_reg and paths.has_key('FRAMEWORKDIR') and paths.has_key('FRAMEWORKVERSION'):
+ exe_paths.append( os.path.join( paths['FRAMEWORKDIR'], paths['FRAMEWORKVERSION'] ) )
+
+ include_path = string.join( include_paths, os.pathsep )
+ lib_path = string.join(lib_paths, os.pathsep )
+ exe_path = string.join(exe_paths, os.pathsep )
+ return (include_path, lib_path, exe_path)
+
+def get_msvc_paths(env, version=None, use_mfc_dirs=0):
+ """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values
+ of those three environment variables that should be set
+ in order to execute the MSVC tools properly."""
+ exe_path = ''
+ lib_path = ''
+ include_path = ''
+
+ if not version:
+ versions = SCons.Tool.msvs.get_visualstudio_versions()
+ if versions:
+ version = versions[0] #use highest version by default
+ else:
+ version = '6.0'
+
+ # Some of the configured directories only
+ # appear if the user changes them from the default.
+ # Therefore, we'll see if we can get the path to the MSDev
+ # base installation from the registry and deduce the default
+ # directories.
+ version_num, suite = SCons.Tool.msvs.msvs_parse_version(version)
+ if version_num >= 8.0:
+ suite = SCons.Tool.msvs.get_default_visualstudio8_suite(env)
+ defpaths = _get_msvc8_default_paths(env, version, suite, use_mfc_dirs)
+ elif version_num >= 7.0:
+ defpaths = _get_msvc7_default_paths(env, version, use_mfc_dirs)
+ else:
+ defpaths = _get_msvc6_default_paths(version, use_mfc_dirs)
+
+ try:
+ include_path = get_msvc_path(env, "include", version)
+ except (SCons.Util.RegError, SCons.Errors.InternalError):
+ include_path = defpaths[0]
+
+ try:
+ lib_path = get_msvc_path(env, "lib", version)
+ except (SCons.Util.RegError, SCons.Errors.InternalError):
+ lib_path = defpaths[1]
+
+ try:
+ exe_path = get_msvc_path(env, "path", version)
+ except (SCons.Util.RegError, SCons.Errors.InternalError):
+ exe_path = defpaths[2]
+
+ return (include_path, lib_path, exe_path)
+
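+# Illustrative call (an assumption, not taken from the upstream source);
+# each element of the returned tuple is an os.pathsep-joined string, which
+# is how generate() below feeds it into the execution environment:
+#
+#   include_path, lib_path, exe_path = get_msvc_paths(env, '8.0')
+#   env.PrependENVPath('INCLUDE', include_path)
+#   env.PrependENVPath('LIB', lib_path)
+#   env.PrependENVPath('PATH', exe_path)
+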
+def get_msvc_default_paths(env, version=None, use_mfc_dirs=0):
+ """Return a 3-tuple of (INCLUDE, LIB, PATH) as the values of those
+ three environment variables that should be set in order to execute
+ the MSVC tools properly. This will only return the default
+ locations for the tools, not the values used by MSVS in their
+ directory setup area. This can help avoid problems with different
+ developers having different settings, and should allow the tools
+ to run in most cases."""
+
+ if not version and not SCons.Util.can_read_reg:
+ version = '6.0'
+
+ try:
+ if not version:
+ version = SCons.Tool.msvs.get_visualstudio_versions()[0] #use highest version
+ except KeyboardInterrupt:
+ raise
+ except:
+ pass
+
+ version_num, suite = SCons.Tool.msvs.msvs_parse_version(version)
+ if version_num >= 8.0:
+ suite = SCons.Tool.msvs.get_default_visualstudio8_suite(env)
+ return _get_msvc8_default_paths(env, version, suite, use_mfc_dirs)
+ elif version_num >= 7.0:
+ return _get_msvc7_default_paths(env, version, use_mfc_dirs)
+ else:
+ return _get_msvc6_default_paths(version, use_mfc_dirs)
+
+def validate_vars(env):
+ """Validate the PCH and PCHSTOP construction variables."""
+ if env.has_key('PCH') and env['PCH']:
+ if not env.has_key('PCHSTOP'):
+            raise SCons.Errors.UserError, "The PCHSTOP construction variable must be defined if PCH is defined."
+ if not SCons.Util.is_String(env['PCHSTOP']):
+ raise SCons.Errors.UserError, "The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP']
+
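+# Illustrative PCH setup that satisfies the checks above (an assumption,
+# not taken from the upstream source):
+#
+#   env['PCHSTOP'] = 'StdAfx.h'
+#   env['PCH'] = env.PCH('StdAfx.cpp')[0]
+#
+# pch_emitter/object_emitter below then make every object file depend on
+# the generated .pch, and $CCPCHFLAGS adds the matching /Yu and /Fp flags.
+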
+def pch_emitter(target, source, env):
+ """Adds the object file target."""
+
+ validate_vars(env)
+
+ pch = None
+ obj = None
+
+ for t in target:
+ if SCons.Util.splitext(str(t))[1] == '.pch':
+ pch = t
+ if SCons.Util.splitext(str(t))[1] == '.obj':
+ obj = t
+
+ if not obj:
+ obj = SCons.Util.splitext(str(pch))[0]+'.obj'
+
+ target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work
+
+ return (target, source)
+
+def object_emitter(target, source, env, parent_emitter):
+ """Sets up the PCH dependencies for an object file."""
+
+ validate_vars(env)
+
+ parent_emitter(target, source, env)
+
+ if env.has_key('PCH') and env['PCH']:
+ env.Depends(target, env['PCH'])
+
+ return (target, source)
+
+def static_object_emitter(target, source, env):
+ return object_emitter(target, source, env,
+ SCons.Defaults.StaticObjectEmitter)
+
+def shared_object_emitter(target, source, env):
+ return object_emitter(target, source, env,
+ SCons.Defaults.SharedObjectEmitter)
+
+pch_action = SCons.Action.Action('$PCHCOM', '$PCHCOMSTR')
+pch_builder = SCons.Builder.Builder(action=pch_action, suffix='.pch',
+ emitter=pch_emitter,
+ source_scanner=SCons.Tool.SourceFileScanner)
+
+
+# Logic to build .rc files into .res files (resource files)
+res_scanner = SCons.Scanner.RC.RCScan()
+res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR')
+res_builder = SCons.Builder.Builder(action=res_action,
+ src_suffix='.rc',
+ suffix='.res',
+ src_builder=[],
+ source_scanner=res_scanner)
+
+
+def generate(env):
+ """Add Builders and construction variables for MSVC++ to an Environment."""
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ for suffix in CSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.CAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
+ static_obj.add_emitter(suffix, static_object_emitter)
+ shared_obj.add_emitter(suffix, shared_object_emitter)
+
+ for suffix in CXXSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.CXXAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
+ static_obj.add_emitter(suffix, static_object_emitter)
+ shared_obj.add_emitter(suffix, shared_object_emitter)
+
+ env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}'])
+ env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s /Fp%s"%(PCHSTOP or "",File(PCH))) or ""}'])
+ env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS'
+ env['CC'] = 'cl'
+ env['CCFLAGS'] = SCons.Util.CLVar('/nologo')
+ env['CFLAGS'] = SCons.Util.CLVar('')
+ env['CCCOM'] = '$CC /Fo$TARGET /c $SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM'
+ env['SHCC'] = '$CC'
+ env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
+ env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
+ env['SHCCCOM'] = '$SHCC /Fo$TARGET /c $SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM'
+ env['CXX'] = '$CC'
+ env['CXXFLAGS'] = SCons.Util.CLVar('$CCFLAGS $( /TP $)')
+ env['CXXCOM'] = '$CXX /Fo$TARGET /c $SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM'
+ env['SHCXX'] = '$CXX'
+ env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
+ env['SHCXXCOM'] = '$SHCXX /Fo$TARGET /c $SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM'
+ env['CPPDEFPREFIX'] = '/D'
+ env['CPPDEFSUFFIX'] = ''
+ env['INCPREFIX'] = '/I'
+ env['INCSUFFIX'] = ''
+# env.Append(OBJEMITTER = [static_object_emitter])
+# env.Append(SHOBJEMITTER = [shared_object_emitter])
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
+
+ env['RC'] = 'rc'
+ env['RCFLAGS'] = SCons.Util.CLVar('')
+ env['RCSUFFIXES']=['.rc','.rc2']
+ env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES'
+ env['BUILDERS']['RES'] = res_builder
+ env['OBJPREFIX'] = ''
+ env['OBJSUFFIX'] = '.obj'
+ env['SHOBJPREFIX'] = '$OBJPREFIX'
+ env['SHOBJSUFFIX'] = '$OBJSUFFIX'
+
+ try:
+ version = SCons.Tool.msvs.get_default_visualstudio_version(env)
+ version_num, suite = SCons.Tool.msvs.msvs_parse_version(version)
+ if version_num == 8.0:
+ suite = SCons.Tool.msvs.get_default_visualstudio8_suite(env)
+
+ use_mfc_dirs = env.get('MSVS_USE_MFC_DIRS', 0)
+ if env.get('MSVS_IGNORE_IDE_PATHS', 0):
+ _get_paths = get_msvc_default_paths
+ else:
+ _get_paths = get_msvc_paths
+ include_path, lib_path, exe_path = _get_paths(env, version, use_mfc_dirs)
+
+ # since other tools can set these, we just make sure that the
+ # relevant stuff from MSVS is in there somewhere.
+ env.PrependENVPath('INCLUDE', include_path)
+ env.PrependENVPath('LIB', lib_path)
+ env.PrependENVPath('PATH', exe_path)
+ except (SCons.Util.RegError, SCons.Errors.InternalError):
+ pass
+
+ env['CFILESUFFIX'] = '.c'
+ env['CXXFILESUFFIX'] = '.cc'
+
+ env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])
+ env['PCHCOM'] = '$CXX $CXXFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Fo${TARGETS[1]} /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS'
+ env['BUILDERS']['PCH'] = pch_builder
+
+ if not env.has_key('ENV'):
+ env['ENV'] = {}
+ if not env['ENV'].has_key('SystemRoot'): # required for dlls in the winsxs folders
+ env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root()
+
+def exists(env):
+ if SCons.Tool.msvs.is_msvs_installed():
+ # there's at least one version of MSVS installed.
+ return 1
+ else:
+ return env.Detect('cl')
+
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/msvs.py b/tools/scons/scons-local-1.2.0/SCons/Tool/msvs.py
new file mode 100644
index 000000000..f8a20ea1f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/msvs.py
@@ -0,0 +1,1815 @@
+"""SCons.Tool.msvs
+
+Tool-specific initialization for Microsoft Visual Studio project files.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/msvs.py 3842 2008/12/20 22:59:52 scons"
+
+import base64
+import hashlib
+import os.path
+import pickle
+import re
+import string
+import sys
+
+import SCons.Builder
+import SCons.Errors
+import SCons.Node.FS
+import SCons.Platform.win32
+import SCons.Script.SConscript
+import SCons.Util
+import SCons.Warnings
+
+##############################################################################
+# Below here are the classes and functions for generation of
+# DSP/DSW/SLN/VCPROJ files.
+##############################################################################
+
+def _hexdigest(s):
+ """Return a string as a string of hex characters.
+ """
+ # NOTE: This routine is a method in the Python 2.0 interface
+ # of the native md5 module, but we want SCons to operate all
+ # the way back to at least Python 1.5.2, which doesn't have it.
+ h = string.hexdigits
+ r = ''
+ for c in s:
+ i = ord(c)
+ r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
+ return r
+
+def xmlify(s):
+ s = string.replace(s, "&", "&amp;") # do this first
+ s = string.replace(s, "'", "&apos;")
+ s = string.replace(s, '"', "&quot;")
+ return s
+
+external_makefile_guid = '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}'
+
+def _generateGUID(slnfile, name):
+ """This generates a dummy GUID for the sln file to use. It is
+ based on the MD5 signatures of the sln filename plus the name of
+ the project. It basically just needs to be unique, and not
+ change with each invocation."""
+ m = hashlib.md5()
+ m.update(str(slnfile) + str(name))
+ # TODO(1.5)
+ #solution = m.hexdigest().upper()
+ solution = string.upper(_hexdigest(m.digest()))
+ # convert most of the signature to GUID form (discard the rest)
+ solution = "{" + solution[:8] + "-" + solution[8:12] + "-" + solution[12:16] + "-" + solution[16:20] + "-" + solution[20:32] + "}"
+ return solution
+
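+# Illustrative example (an assumption, not taken from the upstream source):
+#
+#   >>> _generateGUID('project.sln', 'myproj')     # doctest-style sketch
+#   '{XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX}'
+#
+# i.e. 32 hex digits of the MD5 of 'project.slnmyproj' formatted as a GUID,
+# identical on every invocation for the same inputs.
+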
+version_re = re.compile(r'(\d+\.\d+)(.*)')
+
+def msvs_parse_version(s):
+ """
+ Split a Visual Studio version, which may in fact be something like
+    '7.0Exp', into its version number (returned as a float) and trailing
+ "suite" portion.
+ """
+ num, suite = version_re.match(s).groups()
+ return float(num), suite
+
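+# For example (illustrative, not taken from the upstream source):
+#
+#   >>> msvs_parse_version('7.1')
+#   (7.1, '')
+#   >>> msvs_parse_version('8.0Exp')
+#   (8.0, 'Exp')
+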
+# This is how we re-invoke SCons from inside MSVS Project files.
+# The problem is that we might have been invoked as either scons.bat
+# or scons.py. If we were invoked directly as scons.py, then we could
+# use sys.argv[0] to find the SCons "executable," but that doesn't work
+# if we were invoked as scons.bat, which uses "python -c" to execute
+# things and ends up with "-c" as sys.argv[0]. Consequently, we have
+# the MSVS Project file invoke SCons the same way that scons.bat does,
+# which works regardless of how we were invoked.
+def getExecScriptMain(env, xml=None):
+ scons_home = env.get('SCONS_HOME')
+ if not scons_home and os.environ.has_key('SCONS_LIB_DIR'):
+ scons_home = os.environ['SCONS_LIB_DIR']
+ if scons_home:
+ exec_script_main = "from os.path import join; import sys; sys.path = [ r'%s' ] + sys.path; import SCons.Script; SCons.Script.main()" % scons_home
+ else:
+ version = SCons.__version__
+ exec_script_main = "from os.path import join; import sys; sys.path = [ join(sys.prefix, 'Lib', 'site-packages', 'scons-%(version)s'), join(sys.prefix, 'scons-%(version)s'), join(sys.prefix, 'Lib', 'site-packages', 'scons'), join(sys.prefix, 'scons') ] + sys.path; import SCons.Script; SCons.Script.main()" % locals()
+ if xml:
+ exec_script_main = xmlify(exec_script_main)
+ return exec_script_main
+
+# The string for the Python executable we tell the Project file to use
+# is either sys.executable or, if an external PYTHON_ROOT environment
+# variable exists, $(PYTHON)ROOT\\python.exe (generalized a little to
+# pluck the actual executable name from sys.executable).
+try:
+ python_root = os.environ['PYTHON_ROOT']
+except KeyError:
+ python_executable = sys.executable
+else:
+ python_executable = os.path.join('$$(PYTHON_ROOT)',
+ os.path.split(sys.executable)[1])
+
+class Config:
+ pass
+
+def splitFully(path):
+ dir, base = os.path.split(path)
+ if dir and dir != '' and dir != path:
+ return splitFully(dir)+[base]
+ if base == '':
+ return []
+ return [base]
+
+def makeHierarchy(sources):
+ '''Break a list of files into a hierarchy; for each value, if it is a string,
+ then it is a file. If it is a dictionary, it is a folder. The string is
+ the original path of the file.'''
+
+ hierarchy = {}
+ for file in sources:
+ path = splitFully(file)
+ if len(path):
+ dict = hierarchy
+ for part in path[:-1]:
+ if not dict.has_key(part):
+ dict[part] = {}
+ dict = dict[part]
+ dict[path[-1]] = file
+ #else:
+ # print 'Warning: failed to decompose path for '+str(file)
+ return hierarchy
+
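+# Illustrative example (an assumption, not taken from the upstream source):
+#
+#   >>> makeHierarchy(['src/a.cpp', 'src/inc/a.h', 'SConstruct'])
+#   {'src': {'a.cpp': 'src/a.cpp', 'inc': {'a.h': 'src/inc/a.h'}},
+#    'SConstruct': 'SConstruct'}
+#
+# (dictionary ordering is, of course, not significant here)
+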
+class _DSPGenerator:
+ """ Base class for DSP generators """
+
+ srcargs = [
+ 'srcs',
+ 'incs',
+ 'localincs',
+ 'resources',
+ 'misc']
+
+ def __init__(self, dspfile, source, env):
+ self.dspfile = str(dspfile)
+ try:
+ get_abspath = dspfile.get_abspath
+ except AttributeError:
+ self.dspabs = os.path.abspath(dspfile)
+ else:
+ self.dspabs = get_abspath()
+
+ if not env.has_key('variant'):
+ raise SCons.Errors.InternalError, \
+ "You must specify a 'variant' argument (i.e. 'Debug' or " +\
+ "'Release') to create an MSVSProject."
+ elif SCons.Util.is_String(env['variant']):
+ variants = [env['variant']]
+ elif SCons.Util.is_List(env['variant']):
+ variants = env['variant']
+
+ if not env.has_key('buildtarget') or env['buildtarget'] == None:
+ buildtarget = ['']
+ elif SCons.Util.is_String(env['buildtarget']):
+ buildtarget = [env['buildtarget']]
+ elif SCons.Util.is_List(env['buildtarget']):
+ if len(env['buildtarget']) != len(variants):
+ raise SCons.Errors.InternalError, \
+ "Sizes of 'buildtarget' and 'variant' lists must be the same."
+ buildtarget = []
+ for bt in env['buildtarget']:
+ if SCons.Util.is_String(bt):
+ buildtarget.append(bt)
+ else:
+ buildtarget.append(bt.get_abspath())
+ else:
+ buildtarget = [env['buildtarget'].get_abspath()]
+ if len(buildtarget) == 1:
+ bt = buildtarget[0]
+ buildtarget = []
+ for _ in variants:
+ buildtarget.append(bt)
+
+ if not env.has_key('outdir') or env['outdir'] == None:
+ outdir = ['']
+ elif SCons.Util.is_String(env['outdir']):
+ outdir = [env['outdir']]
+ elif SCons.Util.is_List(env['outdir']):
+ if len(env['outdir']) != len(variants):
+ raise SCons.Errors.InternalError, \
+ "Sizes of 'outdir' and 'variant' lists must be the same."
+ outdir = []
+ for s in env['outdir']:
+ if SCons.Util.is_String(s):
+ outdir.append(s)
+ else:
+ outdir.append(s.get_abspath())
+ else:
+ outdir = [env['outdir'].get_abspath()]
+ if len(outdir) == 1:
+ s = outdir[0]
+ outdir = []
+ for v in variants:
+ outdir.append(s)
+
+ if not env.has_key('runfile') or env['runfile'] == None:
+ runfile = buildtarget[-1:]
+ elif SCons.Util.is_String(env['runfile']):
+ runfile = [env['runfile']]
+ elif SCons.Util.is_List(env['runfile']):
+ if len(env['runfile']) != len(variants):
+ raise SCons.Errors.InternalError, \
+ "Sizes of 'runfile' and 'variant' lists must be the same."
+ runfile = []
+ for s in env['runfile']:
+ if SCons.Util.is_String(s):
+ runfile.append(s)
+ else:
+ runfile.append(s.get_abspath())
+ else:
+ runfile = [env['runfile'].get_abspath()]
+ if len(runfile) == 1:
+ s = runfile[0]
+ runfile = []
+ for v in variants:
+ runfile.append(s)
+
+ self.sconscript = env['MSVSSCONSCRIPT']
+
+ cmdargs = env.get('cmdargs', '')
+
+ self.env = env
+
+ if self.env.has_key('name'):
+ self.name = self.env['name']
+ else:
+ self.name = os.path.basename(SCons.Util.splitext(self.dspfile)[0])
+ self.name = self.env.subst(self.name)
+
+ sourcenames = [
+ 'Source Files',
+ 'Header Files',
+ 'Local Headers',
+ 'Resource Files',
+ 'Other Files']
+
+ self.sources = {}
+ for n in sourcenames:
+ self.sources[n] = []
+
+ self.configs = {}
+
+ self.nokeep = 0
+        if env.has_key('nokeep') and env['nokeep'] != 0:
+ self.nokeep = 1
+
+ if self.nokeep == 0 and os.path.exists(self.dspabs):
+ self.Parse()
+
+ for t in zip(sourcenames,self.srcargs):
+ if self.env.has_key(t[1]):
+ if SCons.Util.is_List(self.env[t[1]]):
+ for i in self.env[t[1]]:
+ if not i in self.sources[t[0]]:
+ self.sources[t[0]].append(i)
+ else:
+ if not self.env[t[1]] in self.sources[t[0]]:
+ self.sources[t[0]].append(self.env[t[1]])
+
+ for n in sourcenames:
+ # TODO(1.5):
+ #self.sources[n].sort(lambda a, b: cmp(a.lower(), b.lower()))
+ self.sources[n].sort(lambda a, b: cmp(string.lower(a), string.lower(b)))
+
+ def AddConfig(self, variant, buildtarget, outdir, runfile, cmdargs, dspfile=dspfile):
+ config = Config()
+ config.buildtarget = buildtarget
+ config.outdir = outdir
+ config.cmdargs = cmdargs
+ config.runfile = runfile
+
+ match = re.match('(.*)\|(.*)', variant)
+ if match:
+ config.variant = match.group(1)
+ config.platform = match.group(2)
+ else:
+ config.variant = variant
+ config.platform = 'Win32'
+
+ self.configs[variant] = config
+ print "Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dspfile) + "'"
+
+ for i in range(len(variants)):
+ AddConfig(self, variants[i], buildtarget[i], outdir[i], runfile[i], cmdargs)
+
+ self.platforms = []
+ for key in self.configs.keys():
+ platform = self.configs[key].platform
+ if not platform in self.platforms:
+ self.platforms.append(platform)
+
+ def Build(self):
+ pass
+
+V6DSPHeader = """\
+# Microsoft Developer Studio Project File - Name="%(name)s" - Package Owner=<4>
+# Microsoft Developer Studio Generated Build File, Format Version 6.00
+# ** DO NOT EDIT **
+
+# TARGTYPE "Win32 (x86) External Target" 0x0106
+
+CFG=%(name)s - Win32 %(confkey)s
+!MESSAGE This is not a valid makefile. To build this project using NMAKE,
+!MESSAGE use the Export Makefile command and run
+!MESSAGE
+!MESSAGE NMAKE /f "%(name)s.mak".
+!MESSAGE
+!MESSAGE You can specify a configuration when running NMAKE
+!MESSAGE by defining the macro CFG on the command line. For example:
+!MESSAGE
+!MESSAGE NMAKE /f "%(name)s.mak" CFG="%(name)s - Win32 %(confkey)s"
+!MESSAGE
+!MESSAGE Possible choices for configuration are:
+!MESSAGE
+"""
+
+class _GenerateV6DSP(_DSPGenerator):
+ """Generates a Project file for MSVS 6.0"""
+
+ def PrintHeader(self):
+ # pick a default config
+ confkeys = self.configs.keys()
+ confkeys.sort()
+
+ name = self.name
+ confkey = confkeys[0]
+
+ self.file.write(V6DSPHeader % locals())
+
+ for kind in confkeys:
+ self.file.write('!MESSAGE "%s - Win32 %s" (based on "Win32 (x86) External Target")\n' % (name, kind))
+
+ self.file.write('!MESSAGE \n\n')
+
+ def PrintProject(self):
+ name = self.name
+ self.file.write('# Begin Project\n'
+ '# PROP AllowPerConfigDependencies 0\n'
+ '# PROP Scc_ProjName ""\n'
+ '# PROP Scc_LocalPath ""\n\n')
+
+ first = 1
+ confkeys = self.configs.keys()
+ confkeys.sort()
+ for kind in confkeys:
+ outdir = self.configs[kind].outdir
+ buildtarget = self.configs[kind].buildtarget
+ if first == 1:
+ self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind))
+ first = 0
+ else:
+ self.file.write('\n!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name, kind))
+
+ env_has_buildtarget = self.env.has_key('MSVSBUILDTARGET')
+ if not env_has_buildtarget:
+ self.env['MSVSBUILDTARGET'] = buildtarget
+
+ # have to write this twice, once with the BASE settings, and once without
+ for base in ("BASE ",""):
+ self.file.write('# PROP %sUse_MFC 0\n'
+ '# PROP %sUse_Debug_Libraries ' % (base, base))
+ # TODO(1.5):
+ #if kind.lower().find('debug') < 0:
+ if string.find(string.lower(kind), 'debug') < 0:
+ self.file.write('0\n')
+ else:
+ self.file.write('1\n')
+ self.file.write('# PROP %sOutput_Dir "%s"\n'
+ '# PROP %sIntermediate_Dir "%s"\n' % (base,outdir,base,outdir))
+ cmd = 'echo Starting SCons && ' + self.env.subst('$MSVSBUILDCOM', 1)
+ self.file.write('# PROP %sCmd_Line "%s"\n'
+ '# PROP %sRebuild_Opt "-c && %s"\n'
+ '# PROP %sTarget_File "%s"\n'
+ '# PROP %sBsc_Name ""\n'
+ '# PROP %sTarget_Dir ""\n'\
+ %(base,cmd,base,cmd,base,buildtarget,base,base))
+
+ if not env_has_buildtarget:
+ del self.env['MSVSBUILDTARGET']
+
+ self.file.write('\n!ENDIF\n\n'
+ '# Begin Target\n\n')
+ for kind in confkeys:
+ self.file.write('# Name "%s - Win32 %s"\n' % (name,kind))
+ self.file.write('\n')
+ first = 0
+ for kind in confkeys:
+ if first == 0:
+ self.file.write('!IF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind))
+ first = 1
+ else:
+ self.file.write('!ELSEIF "$(CFG)" == "%s - Win32 %s"\n\n' % (name,kind))
+ self.file.write('!ENDIF \n\n')
+ self.PrintSourceFiles()
+ self.file.write('# End Target\n'
+ '# End Project\n')
+
+ if self.nokeep == 0:
+ # now we pickle some data and add it to the file -- MSDEV will ignore it.
+ pdata = pickle.dumps(self.configs,1)
+ pdata = base64.encodestring(pdata)
+ self.file.write(pdata + '\n')
+ pdata = pickle.dumps(self.sources,1)
+ pdata = base64.encodestring(pdata)
+ self.file.write(pdata + '\n')
+
+ def PrintSourceFiles(self):
+ categories = {'Source Files': 'cpp|c|cxx|l|y|def|odl|idl|hpj|bat',
+ 'Header Files': 'h|hpp|hxx|hm|inl',
+ 'Local Headers': 'h|hpp|hxx|hm|inl',
+ 'Resource Files': 'r|rc|ico|cur|bmp|dlg|rc2|rct|bin|cnt|rtf|gif|jpg|jpeg|jpe',
+ 'Other Files': ''}
+
+ cats = categories.keys()
+ # TODO(1.5):
+ #cats.sort(lambda a, b: cmp(a.lower(), b.lower()))
+ cats.sort(lambda a, b: cmp(string.lower(a), string.lower(b)))
+ for kind in cats:
+ if not self.sources[kind]:
+ continue # skip empty groups
+
+ self.file.write('# Begin Group "' + kind + '"\n\n')
+ # TODO(1.5)
+ #typelist = categories[kind].replace('|', ';')
+ typelist = string.replace(categories[kind], '|', ';')
+ self.file.write('# PROP Default_Filter "' + typelist + '"\n')
+
+ for file in self.sources[kind]:
+ file = os.path.normpath(file)
+ self.file.write('# Begin Source File\n\n'
+ 'SOURCE="' + file + '"\n'
+ '# End Source File\n')
+ self.file.write('# End Group\n')
+
+ # add the SConscript file outside of the groups
+ self.file.write('# Begin Source File\n\n'
+ 'SOURCE="' + str(self.sconscript) + '"\n'
+ '# End Source File\n')
+
+ def Parse(self):
+ try:
+ dspfile = open(self.dspabs,'r')
+ except IOError:
+ return # doesn't exist yet, so can't add anything to configs.
+
+ line = dspfile.readline()
+ while line:
+ # TODO(1.5):
+ #if line.find("# End Project") > -1:
+ if string.find(line, "# End Project") > -1:
+ break
+ line = dspfile.readline()
+
+ line = dspfile.readline()
+ datas = line
+ while line and line != '\n':
+ line = dspfile.readline()
+ datas = datas + line
+
+ # OK, we've found our little pickled cache of data.
+ try:
+ datas = base64.decodestring(datas)
+ data = pickle.loads(datas)
+ except KeyboardInterrupt:
+ raise
+ except:
+ return # unable to unpickle any data for some reason
+
+ self.configs.update(data)
+
+ data = None
+ line = dspfile.readline()
+ datas = line
+ while line and line != '\n':
+ line = dspfile.readline()
+ datas = datas + line
+
+        # OK, we've found our little pickled cache of data.
+ try:
+ datas = base64.decodestring(datas)
+ data = pickle.loads(datas)
+ except KeyboardInterrupt:
+ raise
+ except:
+ return # unable to unpickle any data for some reason
+
+ self.sources.update(data)
+
+ def Build(self):
+ try:
+ self.file = open(self.dspabs,'w')
+ except IOError, detail:
+ raise SCons.Errors.InternalError, 'Unable to open "' + self.dspabs + '" for writing:' + str(detail)
+ else:
+ self.PrintHeader()
+ self.PrintProject()
+ self.file.close()
+
+V7DSPHeader = """\
+<?xml version="1.0" encoding = "%(encoding)s"?>
+<VisualStudioProject
+\tProjectType="Visual C++"
+\tVersion="%(versionstr)s"
+\tName="%(name)s"
+%(scc_attrs)s
+\tKeyword="MakeFileProj">
+"""
+
+V7DSPConfiguration = """\
+\t\t<Configuration
+\t\t\tName="%(variant)s|%(platform)s"
+\t\t\tOutputDirectory="%(outdir)s"
+\t\t\tIntermediateDirectory="%(outdir)s"
+\t\t\tConfigurationType="0"
+\t\t\tUseOfMFC="0"
+\t\t\tATLMinimizesCRunTimeLibraryUsage="FALSE">
+\t\t\t<Tool
+\t\t\t\tName="VCNMakeTool"
+\t\t\t\tBuildCommandLine="%(buildcmd)s"
+\t\t\t\tCleanCommandLine="%(cleancmd)s"
+\t\t\t\tRebuildCommandLine="%(rebuildcmd)s"
+\t\t\t\tOutput="%(runfile)s"/>
+\t\t</Configuration>
+"""
+
+V8DSPHeader = """\
+<?xml version="1.0" encoding="%(encoding)s"?>
+<VisualStudioProject
+\tProjectType="Visual C++"
+\tVersion="%(versionstr)s"
+\tName="%(name)s"
+%(scc_attrs)s
+\tRootNamespace="%(name)s"
+\tKeyword="MakeFileProj">
+"""
+
+V8DSPConfiguration = """\
+\t\t<Configuration
+\t\t\tName="%(variant)s|Win32"
+\t\t\tConfigurationType="0"
+\t\t\tUseOfMFC="0"
+\t\t\tATLMinimizesCRunTimeLibraryUsage="false"
+\t\t\t>
+\t\t\t<Tool
+\t\t\t\tName="VCNMakeTool"
+\t\t\t\tBuildCommandLine="%(buildcmd)s"
+\t\t\t\tReBuildCommandLine="%(rebuildcmd)s"
+\t\t\t\tCleanCommandLine="%(cleancmd)s"
+\t\t\t\tOutput="%(runfile)s"
+\t\t\t\tPreprocessorDefinitions=""
+\t\t\t\tIncludeSearchPath=""
+\t\t\t\tForcedIncludes=""
+\t\t\t\tAssemblySearchPath=""
+\t\t\t\tForcedUsingAssemblies=""
+\t\t\t\tCompileAsManaged=""
+\t\t\t/>
+\t\t</Configuration>
+"""
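The generator classes below fill these header and configuration templates with Python %-formatting against locals(). A minimal sketch of that pattern, using a made-up template and made-up values rather than the real V7DSPHeader:

    header = ('<?xml version="1.0" encoding="%(encoding)s"?>\n'
              '<VisualStudioProject Version="%(versionstr)s" Name="%(name)s">\n')
    encoding = 'Windows-1252'    # illustrative values only
    versionstr = '7.10'
    name = 'example'
    print header % locals()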
+class _GenerateV7DSP(_DSPGenerator):
+ """Generates a Project file for MSVS .NET"""
+
+ def __init__(self, dspfile, source, env):
+ _DSPGenerator.__init__(self, dspfile, source, env)
+ self.version = env['MSVS_VERSION']
+ self.version_num, self.suite = msvs_parse_version(self.version)
+ if self.version_num >= 8.0:
+ self.versionstr = '8.00'
+ self.dspheader = V8DSPHeader
+ self.dspconfiguration = V8DSPConfiguration
+ else:
+ if self.version_num >= 7.1:
+ self.versionstr = '7.10'
+ else:
+ self.versionstr = '7.00'
+ self.dspheader = V7DSPHeader
+ self.dspconfiguration = V7DSPConfiguration
+ self.file = None
+
+ def PrintHeader(self):
+ env = self.env
+ versionstr = self.versionstr
+ name = self.name
+ encoding = self.env.subst('$MSVSENCODING')
+ scc_provider = env.get('MSVS_SCC_PROVIDER', '')
+ scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '')
+ scc_aux_path = env.get('MSVS_SCC_AUX_PATH', '')
+ scc_local_path = env.get('MSVS_SCC_LOCAL_PATH', '')
+ project_guid = env.get('MSVS_PROJECT_GUID', '')
+ if self.version_num >= 8.0 and not project_guid:
+ project_guid = _generateGUID(self.dspfile, '')
+ if scc_provider != '':
+ scc_attrs = ('\tProjectGUID="%s"\n'
+ '\tSccProjectName="%s"\n'
+ '\tSccAuxPath="%s"\n'
+ '\tSccLocalPath="%s"\n'
+ '\tSccProvider="%s"' % (project_guid, scc_project_name, scc_aux_path, scc_local_path, scc_provider))
+ else:
+ scc_attrs = ('\tProjectGUID="%s"\n'
+ '\tSccProjectName="%s"\n'
+ '\tSccLocalPath="%s"' % (project_guid, scc_project_name, scc_local_path))
+
+ self.file.write(self.dspheader % locals())
+
+ self.file.write('\t<Platforms>\n')
+ for platform in self.platforms:
+ self.file.write(
+ '\t\t<Platform\n'
+ '\t\t\tName="%s"/>\n' % platform)
+ self.file.write('\t</Platforms>\n')
+
+ if self.version_num >= 8.0:
+ self.file.write('\t<ToolFiles>\n'
+ '\t</ToolFiles>\n')
+
+ def PrintProject(self):
+ self.file.write('\t<Configurations>\n')
+
+ confkeys = self.configs.keys()
+ confkeys.sort()
+ for kind in confkeys:
+ variant = self.configs[kind].variant
+ platform = self.configs[kind].platform
+ outdir = self.configs[kind].outdir
+ buildtarget = self.configs[kind].buildtarget
+ runfile = self.configs[kind].runfile
+ cmdargs = self.configs[kind].cmdargs
+
+ env_has_buildtarget = self.env.has_key('MSVSBUILDTARGET')
+ if not env_has_buildtarget:
+ self.env['MSVSBUILDTARGET'] = buildtarget
+
+ starting = 'echo Starting SCons && '
+ if cmdargs:
+ cmdargs = ' ' + cmdargs
+ else:
+ cmdargs = ''
+ buildcmd = xmlify(starting + self.env.subst('$MSVSBUILDCOM', 1) + cmdargs)
+ rebuildcmd = xmlify(starting + self.env.subst('$MSVSREBUILDCOM', 1) + cmdargs)
+ cleancmd = xmlify(starting + self.env.subst('$MSVSCLEANCOM', 1) + cmdargs)
+
+ if not env_has_buildtarget:
+ del self.env['MSVSBUILDTARGET']
+
+ self.file.write(self.dspconfiguration % locals())
+
+ self.file.write('\t</Configurations>\n')
+
+ if self.version_num >= 7.1:
+ self.file.write('\t<References>\n'
+ '\t</References>\n')
+
+ self.PrintSourceFiles()
+
+ self.file.write('</VisualStudioProject>\n')
+
+ if self.nokeep == 0:
+ # now we pickle some data and add it to the file -- MSDEV will ignore it.
+ pdata = pickle.dumps(self.configs,1)
+ pdata = base64.encodestring(pdata)
+ self.file.write('<!-- SCons Data:\n' + pdata + '\n')
+ pdata = pickle.dumps(self.sources,1)
+ pdata = base64.encodestring(pdata)
+ self.file.write(pdata + '-->\n')
+
+ def printSources(self, hierarchy, commonprefix):
+ sorteditems = hierarchy.items()
+ # TODO(1.5):
+ #sorteditems.sort(lambda a, b: cmp(a[0].lower(), b[0].lower()))
+ sorteditems.sort(lambda a, b: cmp(string.lower(a[0]), string.lower(b[0])))
+
+ # First folders, then files
+ for key, value in sorteditems:
+ if SCons.Util.is_Dict(value):
+ self.file.write('\t\t\t<Filter\n'
+ '\t\t\t\tName="%s"\n'
+ '\t\t\t\tFilter="">\n' % (key))
+ self.printSources(value, commonprefix)
+ self.file.write('\t\t\t</Filter>\n')
+
+ for key, value in sorteditems:
+ if SCons.Util.is_String(value):
+ file = value
+ if commonprefix:
+ file = os.path.join(commonprefix, value)
+ file = os.path.normpath(file)
+ self.file.write('\t\t\t<File\n'
+ '\t\t\t\tRelativePath="%s">\n'
+ '\t\t\t</File>\n' % (file))
+
+ def PrintSourceFiles(self):
+ categories = {'Source Files': 'cpp;c;cxx;l;y;def;odl;idl;hpj;bat',
+ 'Header Files': 'h;hpp;hxx;hm;inl',
+ 'Local Headers': 'h;hpp;hxx;hm;inl',
+ 'Resource Files': 'r;rc;ico;cur;bmp;dlg;rc2;rct;bin;cnt;rtf;gif;jpg;jpeg;jpe',
+ 'Other Files': ''}
+
+ self.file.write('\t<Files>\n')
+
+ cats = categories.keys()
+ # TODO(1.5)
+ #cats.sort(lambda a, b: cmp(a.lower(), b.lower()))
+ cats.sort(lambda a, b: cmp(string.lower(a), string.lower(b)))
+ cats = filter(lambda k, s=self: s.sources[k], cats)
+ for kind in cats:
+ if len(cats) > 1:
+ self.file.write('\t\t<Filter\n'
+ '\t\t\tName="%s"\n'
+ '\t\t\tFilter="%s">\n' % (kind, categories[kind]))
+
+ sources = self.sources[kind]
+
+ # First remove any common prefix
+ commonprefix = None
+ if len(sources) > 1:
+ s = map(os.path.normpath, sources)
+ # take the dirname because the prefix may include parts
+ # of the filenames (e.g. if you have 'dir\abcd' and
+ # 'dir\acde' then the cp will be 'dir\a' )
+ cp = os.path.dirname( os.path.commonprefix(s) )
+ if cp and s[0][len(cp)] == os.sep:
+ # +1 because the filename starts after the separator
+ sources = map(lambda s, l=len(cp)+1: s[l:], sources)
+ commonprefix = cp
+ elif len(sources) == 1:
+ commonprefix = os.path.dirname( sources[0] )
+ sources[0] = os.path.basename( sources[0] )
+
+ hierarchy = makeHierarchy(sources)
+ self.printSources(hierarchy, commonprefix=commonprefix)
+
+ if len(cats)>1:
+ self.file.write('\t\t</Filter>\n')
+
+ # add the SConscript file outside of the groups
+ self.file.write('\t\t<File\n'
+ '\t\t\tRelativePath="%s">\n'
+ '\t\t</File>\n' % str(self.sconscript))
+
+ self.file.write('\t</Files>\n'
+ '\t<Globals>\n'
+ '\t</Globals>\n')
+
+ def Parse(self):
+ try:
+ dspfile = open(self.dspabs,'r')
+ except IOError:
+ return # doesn't exist yet, so can't add anything to configs.
+
+ line = dspfile.readline()
+ while line:
+ # TODO(1.5)
+ #if line.find('<!-- SCons Data:') > -1:
+ if string.find(line, '<!-- SCons Data:') > -1:
+ break
+ line = dspfile.readline()
+
+ line = dspfile.readline()
+ datas = line
+ while line and line != '\n':
+ line = dspfile.readline()
+ datas = datas + line
+
+ # OK, we've found our little pickled cache of data.
+ try:
+ datas = base64.decodestring(datas)
+ data = pickle.loads(datas)
+ except KeyboardInterrupt:
+ raise
+ except:
+ return # unable to unpickle any data for some reason
+
+ self.configs.update(data)
+
+ data = None
+ line = dspfile.readline()
+ datas = line
+ while line and line != '\n':
+ line = dspfile.readline()
+ datas = datas + line
+
+ # OK, we've found our little pickled cache of data.
+ try:
+ datas = base64.decodestring(datas)
+ data = pickle.loads(datas)
+ except KeyboardInterrupt:
+ raise
+ except:
+ return # unable to unpickle any data for some reason
+
+ self.sources.update(data)
+
+ def Build(self):
+ try:
+ self.file = open(self.dspabs,'w')
+ except IOError, detail:
+ raise SCons.Errors.InternalError, 'Unable to open "' + self.dspabs + '" for writing:' + str(detail)
+ else:
+ self.PrintHeader()
+ self.PrintProject()
+ self.file.close()
+
+class _DSWGenerator:
+ """ Base class for DSW generators """
+ def __init__(self, dswfile, source, env):
+ self.dswfile = os.path.normpath(str(dswfile))
+ self.env = env
+
+ if not env.has_key('projects'):
+ raise SCons.Errors.UserError, \
+ "You must specify a 'projects' argument to create an MSVSSolution."
+ projects = env['projects']
+ if not SCons.Util.is_List(projects):
+ raise SCons.Errors.InternalError, \
+ "The 'projects' argument must be a list of nodes."
+ projects = SCons.Util.flatten(projects)
+ if len(projects) < 1:
+ raise SCons.Errors.UserError, \
+ "You must specify at least one project to create an MSVSSolution."
+ self.dspfiles = map(str, projects)
+
+ if self.env.has_key('name'):
+ self.name = self.env['name']
+ else:
+ self.name = os.path.basename(SCons.Util.splitext(self.dswfile)[0])
+ self.name = self.env.subst(self.name)
+
+ def Build(self):
+ pass
+
+class _GenerateV7DSW(_DSWGenerator):
+ """Generates a Solution file for MSVS .NET"""
+ def __init__(self, dswfile, source, env):
+ _DSWGenerator.__init__(self, dswfile, source, env)
+
+ self.file = None
+ self.version = self.env['MSVS_VERSION']
+ self.version_num, self.suite = msvs_parse_version(self.version)
+ self.versionstr = '7.00'
+        if self.version_num >= 8.0:
+            self.versionstr = '9.00'
+        elif self.version_num >= 7.1:
+            self.versionstr = '8.00'
+
+ if env.has_key('slnguid') and env['slnguid']:
+ self.slnguid = env['slnguid']
+ else:
+ self.slnguid = _generateGUID(dswfile, self.name)
+
+ self.configs = {}
+
+ self.nokeep = 0
+        if env.has_key('nokeep') and env['nokeep'] != 0:
+ self.nokeep = 1
+
+ if self.nokeep == 0 and os.path.exists(self.dswfile):
+ self.Parse()
+
+ def AddConfig(self, variant, dswfile=dswfile):
+ config = Config()
+
+ match = re.match('(.*)\|(.*)', variant)
+ if match:
+ config.variant = match.group(1)
+ config.platform = match.group(2)
+ else:
+ config.variant = variant
+ config.platform = 'Win32'
+
+ self.configs[variant] = config
+ print "Adding '" + self.name + ' - ' + config.variant + '|' + config.platform + "' to '" + str(dswfile) + "'"
+
+ if not env.has_key('variant'):
+ raise SCons.Errors.InternalError, \
+ "You must specify a 'variant' argument (i.e. 'Debug' or " +\
+ "'Release') to create an MSVS Solution File."
+ elif SCons.Util.is_String(env['variant']):
+ AddConfig(self, env['variant'])
+ elif SCons.Util.is_List(env['variant']):
+ for variant in env['variant']:
+ AddConfig(self, variant)
+
+ self.platforms = []
+ for key in self.configs.keys():
+ platform = self.configs[key].platform
+ if not platform in self.platforms:
+ self.platforms.append(platform)
+
+ def Parse(self):
+ try:
+ dswfile = open(self.dswfile,'r')
+ except IOError:
+ return # doesn't exist yet, so can't add anything to configs.
+
+ line = dswfile.readline()
+ while line:
+ if line[:9] == "EndGlobal":
+ break
+ line = dswfile.readline()
+
+ line = dswfile.readline()
+ datas = line
+ while line:
+ line = dswfile.readline()
+ datas = datas + line
+
+ # OK, we've found our little pickled cache of data.
+ try:
+ datas = base64.decodestring(datas)
+ data = pickle.loads(datas)
+ except KeyboardInterrupt:
+ raise
+ except:
+ return # unable to unpickle any data for some reason
+
+ self.configs.update(data)
+
+ def PrintSolution(self):
+ """Writes a solution file"""
+ self.file.write('Microsoft Visual Studio Solution File, Format Version %s\n' % self.versionstr )
+ if self.version_num >= 8.0:
+ self.file.write('# Visual Studio 2005\n')
+ for p in self.dspfiles:
+ name = os.path.basename(p)
+ base, suffix = SCons.Util.splitext(name)
+ if suffix == '.vcproj':
+ name = base
+ guid = _generateGUID(p, '')
+ self.file.write('Project("%s") = "%s", "%s", "%s"\n'
+ % ( external_makefile_guid, name, p, guid ) )
+ if self.version_num >= 7.1 and self.version_num < 8.0:
+ self.file.write('\tProjectSection(ProjectDependencies) = postProject\n'
+ '\tEndProjectSection\n')
+ self.file.write('EndProject\n')
+
+ self.file.write('Global\n')
+
+ env = self.env
+ if env.has_key('MSVS_SCC_PROVIDER'):
+ dspfile_base = os.path.basename(self.dspfile)
+ slnguid = self.slnguid
+ scc_provider = env.get('MSVS_SCC_PROVIDER', '')
+ scc_provider = string.replace(scc_provider, ' ', r'\u0020')
+ scc_project_name = env.get('MSVS_SCC_PROJECT_NAME', '')
+ # scc_aux_path = env.get('MSVS_SCC_AUX_PATH', '')
+ scc_local_path = env.get('MSVS_SCC_LOCAL_PATH', '')
+ scc_project_base_path = env.get('MSVS_SCC_PROJECT_BASE_PATH', '')
+ # project_guid = env.get('MSVS_PROJECT_GUID', '')
+
+ self.file.write('\tGlobalSection(SourceCodeControl) = preSolution\n'
+ '\t\tSccNumberOfProjects = 2\n'
+ '\t\tSccProjectUniqueName0 = %(dspfile_base)s\n'
+ '\t\tSccLocalPath0 = %(scc_local_path)s\n'
+ '\t\tCanCheckoutShared = true\n'
+ '\t\tSccProjectFilePathRelativizedFromConnection0 = %(scc_project_base_path)s\n'
+ '\t\tSccProjectName1 = %(scc_project_name)s\n'
+ '\t\tSccLocalPath1 = %(scc_local_path)s\n'
+ '\t\tSccProvider1 = %(scc_provider)s\n'
+ '\t\tCanCheckoutShared = true\n'
+ '\t\tSccProjectFilePathRelativizedFromConnection1 = %(scc_project_base_path)s\n'
+ '\t\tSolutionUniqueID = %(slnguid)s\n'
+ '\tEndGlobalSection\n' % locals())
+
+ if self.version_num >= 8.0:
+ self.file.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
+ else:
+ self.file.write('\tGlobalSection(SolutionConfiguration) = preSolution\n')
+
+ confkeys = self.configs.keys()
+ confkeys.sort()
+ cnt = 0
+ for name in confkeys:
+ variant = self.configs[name].variant
+ platform = self.configs[name].platform
+ if self.version_num >= 8.0:
+ self.file.write('\t\t%s|%s = %s|%s\n' % (variant, platform, variant, platform))
+ else:
+ self.file.write('\t\tConfigName.%d = %s\n' % (cnt, variant))
+ cnt = cnt + 1
+ self.file.write('\tEndGlobalSection\n')
+ if self.version_num < 7.1:
+ self.file.write('\tGlobalSection(ProjectDependencies) = postSolution\n'
+ '\tEndGlobalSection\n')
+ if self.version_num >= 8.0:
+ self.file.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
+ else:
+ self.file.write('\tGlobalSection(ProjectConfiguration) = postSolution\n')
+
+ for name in confkeys:
+ variant = self.configs[name].variant
+ platform = self.configs[name].platform
+ if self.version_num >= 8.0:
+ for p in self.dspfiles:
+ guid = _generateGUID(p, '')
+ self.file.write('\t\t%s.%s|%s.ActiveCfg = %s|%s\n'
+ '\t\t%s.%s|%s.Build.0 = %s|%s\n' % (guid,variant,platform,variant,platform,guid,variant,platform,variant,platform))
+ else:
+ for p in self.dspfiles:
+ guid = _generateGUID(p, '')
+ self.file.write('\t\t%s.%s.ActiveCfg = %s|%s\n'
+ '\t\t%s.%s.Build.0 = %s|%s\n' %(guid,variant,variant,platform,guid,variant,variant,platform))
+
+ self.file.write('\tEndGlobalSection\n')
+
+ if self.version_num >= 8.0:
+ self.file.write('\tGlobalSection(SolutionProperties) = preSolution\n'
+ '\t\tHideSolutionNode = FALSE\n'
+ '\tEndGlobalSection\n')
+ else:
+ self.file.write('\tGlobalSection(ExtensibilityGlobals) = postSolution\n'
+ '\tEndGlobalSection\n'
+ '\tGlobalSection(ExtensibilityAddIns) = postSolution\n'
+ '\tEndGlobalSection\n')
+ self.file.write('EndGlobal\n')
+ if self.nokeep == 0:
+ pdata = pickle.dumps(self.configs,1)
+ pdata = base64.encodestring(pdata)
+ self.file.write(pdata + '\n')
+
+ def Build(self):
+ try:
+ self.file = open(self.dswfile,'w')
+ except IOError, detail:
+ raise SCons.Errors.InternalError, 'Unable to open "' + self.dswfile + '" for writing:' + str(detail)
+ else:
+ self.PrintSolution()
+ self.file.close()
+
+V6DSWHeader = """\
+Microsoft Developer Studio Workspace File, Format Version 6.00
+# WARNING: DO NOT EDIT OR DELETE THIS WORKSPACE FILE!
+
+###############################################################################
+
+Project: "%(name)s"="%(dspfile)s" - Package Owner=<4>
+
+Package=<5>
+{{{
+}}}
+
+Package=<4>
+{{{
+}}}
+
+###############################################################################
+
+Global:
+
+Package=<5>
+{{{
+}}}
+
+Package=<3>
+{{{
+}}}
+
+###############################################################################
+"""
+
+class _GenerateV6DSW(_DSWGenerator):
+ """Generates a Workspace file for MSVS 6.0"""
+
+ def PrintWorkspace(self):
+ """ writes a DSW file """
+ name = self.name
+ dspfile = self.dspfiles[0]
+ self.file.write(V6DSWHeader % locals())
+
+ def Build(self):
+ try:
+ self.file = open(self.dswfile,'w')
+ except IOError, detail:
+ raise SCons.Errors.InternalError, 'Unable to open "' + self.dswfile + '" for writing:' + str(detail)
+ else:
+ self.PrintWorkspace()
+ self.file.close()
+
+
+def GenerateDSP(dspfile, source, env):
+ """Generates a Project file based on the version of MSVS that is being used"""
+
+ version_num = 6.0
+ if env.has_key('MSVS_VERSION'):
+ version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
+ if version_num >= 7.0:
+ g = _GenerateV7DSP(dspfile, source, env)
+ g.Build()
+ else:
+ g = _GenerateV6DSP(dspfile, source, env)
+ g.Build()
+
+def GenerateDSW(dswfile, source, env):
+ """Generates a Solution/Workspace file based on the version of MSVS that is being used"""
+
+ version_num = 6.0
+ if env.has_key('MSVS_VERSION'):
+ version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
+ if version_num >= 7.0:
+ g = _GenerateV7DSW(dswfile, source, env)
+ g.Build()
+ else:
+ g = _GenerateV6DSW(dswfile, source, env)
+ g.Build()
+
+
+##############################################################################
+# Above here are the classes and functions for generation of
+# DSP/DSW/SLN/VCPROJ files.
+##############################################################################
+
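For orientation, the generator classes above are normally reached through the MSVSProject and MSVSSolution builders registered further down in this file. A hedged SConscript sketch (bar.cpp, bar.h and the Program node are placeholders, and a Microsoft toolchain plus an installed MSVS are assumed):

    env = Environment(tools=['msvc', 'mslink', 'msvs'])
    barprog = env.Program(target='bar', source=['bar.cpp'])
    env.MSVSProject(target='Bar' + env['MSVSPROJECTSUFFIX'],
                    srcs=['bar.cpp'],
                    incs=['bar.h'],
                    buildtarget=barprog,
                    variant='Release')
    # auto_build_solution defaults to 1, so a matching workspace/solution file is
    # generated as well; set auto_build_solution=0 and call env.MSVSSolution()
    # to manage the solution separately.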
+def get_default_visualstudio_version(env):
+ """Returns the version set in the env, or the latest version
+ installed, if it can find it, or '6.0' if all else fails. Also
+ updates the environment with what it found."""
+
+ versions = ['6.0']
+
+ if not env.has_key('MSVS') or not SCons.Util.is_Dict(env['MSVS']):
+ v = get_visualstudio_versions()
+ if v:
+ versions = v
+ env['MSVS'] = {'VERSIONS' : versions}
+ else:
+ versions = env['MSVS'].get('VERSIONS', versions)
+
+ if not env.has_key('MSVS_VERSION'):
+ env['MSVS_VERSION'] = versions[0] #use highest version by default
+
+ env['MSVS']['VERSION'] = env['MSVS_VERSION']
+
+ return env['MSVS_VERSION']
+
+def get_visualstudio_versions():
+ """
+ Get list of visualstudio versions from the Windows registry.
+ Returns a list of strings containing version numbers. An empty list
+    is returned if we were unable to access the registry (for example,
+ we couldn't import the registry-access module) or the appropriate
+ registry keys weren't found.
+ """
+
+ if not SCons.Util.can_read_reg:
+ return []
+
+ HLM = SCons.Util.HKEY_LOCAL_MACHINE
+ KEYS = {
+ r'Software\Microsoft\VisualStudio' : '',
+ r'Software\Microsoft\VCExpress' : 'Exp',
+ }
+ L = []
+ for K, suite_suffix in KEYS.items():
+ try:
+ k = SCons.Util.RegOpenKeyEx(HLM, K)
+ i = 0
+ while 1:
+ try:
+ p = SCons.Util.RegEnumKey(k,i)
+ except SCons.Util.RegError:
+ break
+ i = i + 1
+ if not p[0] in '123456789' or p in L:
+ continue
+ # Only add this version number if there is a valid
+ # registry structure (includes the "Setup" key),
+ # and at least some of the correct directories
+ # exist. Sometimes VS uninstall leaves around
+ # some registry/filesystem turds that we don't
+ # want to trip over. Also, some valid registry
+ # entries are MSDN entries, not MSVS ('7.1',
+ # notably), and we want to skip those too.
+ try:
+ SCons.Util.RegOpenKeyEx(HLM, K + '\\' + p + '\\Setup')
+ except SCons.Util.RegError:
+ continue
+
+ id = []
+ idk = SCons.Util.RegOpenKeyEx(HLM, K + '\\' + p)
+ # This is not always here -- it only exists if the
+ # user installed into a non-standard location (at
+ # least in VS6 it works that way -- VS7 seems to
+ # always write it)
+ try:
+ id = SCons.Util.RegQueryValueEx(idk, 'InstallDir')
+ except SCons.Util.RegError:
+ pass
+
+ # If the InstallDir key doesn't exist,
+ # then we check the default locations.
+ # Note: The IDE's executable is not devenv.exe for VS8 Express.
+ if not id or not id[0]:
+ files_dir = SCons.Platform.win32.get_program_files_dir()
+ version_num, suite = msvs_parse_version(p)
+ if version_num < 7.0:
+ vs = r'Microsoft Visual Studio\Common\MSDev98'
+ elif version_num < 8.0:
+ vs = r'Microsoft Visual Studio .NET\Common7\IDE'
+ else:
+ vs = r'Microsoft Visual Studio 8\Common7\IDE'
+ id = [ os.path.join(files_dir, vs) ]
+ if os.path.exists(id[0]):
+ L.append(p + suite_suffix)
+ except SCons.Util.RegError:
+ pass
+
+ if not L:
+ return []
+
+ # This is a hack to get around the fact that certain Visual Studio
+ # patches place a "6.1" version in the registry, which does not have
+ # any of the keys we need to find include paths, install directories,
+ # etc. Therefore we ignore it if it is there, since it throws all
+ # other logic off.
+ try:
+ L.remove("6.1")
+ except ValueError:
+ pass
+
+ L.sort()
+ L.reverse()
+
+ return L
+
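A small usage sketch for the two registry helpers above; it is only meaningful on a Windows host where the registry can be read, and the printed values are examples rather than guaranteed output:

    import SCons.Environment
    import SCons.Tool.msvs

    env = SCons.Environment.Environment(tools=[])
    print SCons.Tool.msvs.get_visualstudio_versions()            # e.g. ['8.0Exp', '7.1', '6.0']
    print SCons.Tool.msvs.get_default_visualstudio_version(env)  # highest version, or '6.0'
    print env['MSVS']                                            # cached VERSIONS/VERSION entries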
+def get_default_visualstudio8_suite(env):
+ """
+ Returns the Visual Studio 2005 suite identifier set in the env, or the
+ highest suite installed.
+ """
+ if not env.has_key('MSVS') or not SCons.Util.is_Dict(env['MSVS']):
+ env['MSVS'] = {}
+
+ if env.has_key('MSVS_SUITE'):
+ # TODO(1.5)
+ #suite = env['MSVS_SUITE'].upper()
+ suite = string.upper(env['MSVS_SUITE'])
+ suites = [suite]
+ else:
+ suite = 'EXPRESS'
+ suites = [suite]
+ if SCons.Util.can_read_reg:
+ suites = get_visualstudio8_suites()
+ if suites:
+ suite = suites[0] #use best suite by default
+
+ env['MSVS_SUITE'] = suite
+ env['MSVS']['SUITES'] = suites
+ env['MSVS']['SUITE'] = suite
+
+ return suite
+
+def get_visualstudio8_suites():
+ """
+ Returns a sorted list of all installed Visual Studio 2005 suites found
+ in the registry. The highest version should be the first entry in the list.
+ """
+
+ suites = []
+
+ # Detect Standard, Professional and Team edition
+ try:
+ idk = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
+ r'Software\Microsoft\VisualStudio\8.0')
+ SCons.Util.RegQueryValueEx(idk, 'InstallDir')
+ editions = { 'PRO': r'Setup\VS\Pro' } # ToDo: add standard and team editions
+ edition_name = 'STD'
+ for name, key_suffix in editions.items():
+ try:
+ idk = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
+ r'Software\Microsoft\VisualStudio\8.0' + '\\' + key_suffix )
+ edition_name = name
+ except SCons.Util.RegError:
+ pass
+ suites.append(edition_name)
+ except SCons.Util.RegError:
+ pass
+
+ # Detect Express edition
+ try:
+ idk = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
+ r'Software\Microsoft\VCExpress\8.0')
+ SCons.Util.RegQueryValueEx(idk, 'InstallDir')
+ suites.append('EXPRESS')
+ except SCons.Util.RegError:
+ pass
+
+ return suites
+
+def is_msvs_installed():
+ """
+ Check the registry for an installed visual studio.
+ """
+ try:
+ v = SCons.Tool.msvs.get_visualstudio_versions()
+ return v
+ except (SCons.Util.RegError, SCons.Errors.InternalError):
+ return 0
+
+def get_msvs_install_dirs(version = None, vs8suite = None):
+ """
+ Get installed locations for various msvc-related products, like the .NET SDK
+ and the Platform SDK.
+ """
+
+ if not SCons.Util.can_read_reg:
+ return {}
+
+ if not version:
+ versions = get_visualstudio_versions()
+ if versions:
+ version = versions[0] #use highest version by default
+ else:
+ return {}
+
+ version_num, suite = msvs_parse_version(version)
+
+ K = 'Software\\Microsoft\\VisualStudio\\' + str(version_num)
+ if (version_num >= 8.0):
+ if vs8suite == None:
+ # We've been given no guidance about which Visual Studio 8
+ # suite to use, so attempt to autodetect.
+ suites = get_visualstudio8_suites()
+ if suites:
+ vs8suite = suites[0]
+
+ if vs8suite == 'EXPRESS':
+ K = 'Software\\Microsoft\\VCExpress\\' + str(version_num)
+
+ # vc++ install dir
+ rv = {}
+ if (version_num < 7.0):
+ key = K + r'\Setup\Microsoft Visual C++\ProductDir'
+ else:
+ key = K + r'\Setup\VC\ProductDir'
+ try:
+ (rv['VCINSTALLDIR'], t) = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE, key)
+ except SCons.Util.RegError:
+ pass
+
+ # visual studio install dir
+ if (version_num < 7.0):
+ try:
+ (rv['VSINSTALLDIR'], t) = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE,
+ K + r'\Setup\Microsoft Visual Studio\ProductDir')
+ except SCons.Util.RegError:
+ pass
+
+ if not rv.has_key('VSINSTALLDIR') or not rv['VSINSTALLDIR']:
+ if rv.has_key('VCINSTALLDIR') and rv['VCINSTALLDIR']:
+ rv['VSINSTALLDIR'] = os.path.dirname(rv['VCINSTALLDIR'])
+ else:
+ rv['VSINSTALLDIR'] = os.path.join(SCons.Platform.win32.get_program_files_dir(),'Microsoft Visual Studio')
+ else:
+ try:
+ (rv['VSINSTALLDIR'], t) = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE,
+ K + r'\Setup\VS\ProductDir')
+ except SCons.Util.RegError:
+ pass
+
+ # .NET framework install dir
+ try:
+ (rv['FRAMEWORKDIR'], t) = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE,
+ r'Software\Microsoft\.NETFramework\InstallRoot')
+ except SCons.Util.RegError:
+ pass
+
+ if rv.has_key('FRAMEWORKDIR'):
+ # try and enumerate the installed versions of the .NET framework.
+ contents = os.listdir(rv['FRAMEWORKDIR'])
+ l = re.compile('v[0-9]+.*')
+ installed_framework_versions = filter(lambda e, l=l: l.match(e), contents)
+
+ def versrt(a,b):
+ # since version numbers aren't really floats...
+ aa = a[1:]
+ bb = b[1:]
+ aal = string.split(aa, '.')
+ bbl = string.split(bb, '.')
+ # sequence comparison in python is lexicographical
+ # which is exactly what we want.
+ # Note we sort backwards so the highest version is first.
+ return cmp(bbl,aal)
+
+ installed_framework_versions.sort(versrt)
+
+ rv['FRAMEWORKVERSIONS'] = installed_framework_versions
+
+ # TODO: allow a specific framework version to be set
+
+ # Choose a default framework version based on the Visual
+ # Studio version.
+ DefaultFrameworkVersionMap = {
+ '7.0' : 'v1.0',
+ '7.1' : 'v1.1',
+ '8.0' : 'v2.0',
+ # TODO: Does .NET 3.0 need to be worked into here somewhere?
+ }
+ try:
+ default_framework_version = DefaultFrameworkVersionMap[version[:3]]
+ except (KeyError, TypeError):
+ pass
+ else:
+ # Look for the first installed directory in FRAMEWORKDIR that
+ # begins with the framework version string that's appropriate
+ # for the Visual Studio version we're using.
+ for v in installed_framework_versions:
+ if v[:4] == default_framework_version:
+ rv['FRAMEWORKVERSION'] = v
+ break
+
+ # If the framework version couldn't be worked out by the previous
+ # code then fall back to using the latest version of the .NET
+ # framework
+ if not rv.has_key('FRAMEWORKVERSION'):
+ rv['FRAMEWORKVERSION'] = installed_framework_versions[0]
+
+ # .NET framework SDK install dir
+ if rv.has_key('FRAMEWORKVERSION'):
+ # The .NET SDK version used must match the .NET version used,
+ # so we deliberately don't fall back to other .NET framework SDK
+ # versions that might be present.
+ ver = rv['FRAMEWORKVERSION'][:4]
+ key = r'Software\Microsoft\.NETFramework\sdkInstallRoot' + ver
+ try:
+ (rv['FRAMEWORKSDKDIR'], t) = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE,
+ key)
+ except SCons.Util.RegError:
+ pass
+
+ # MS Platform SDK dir
+ try:
+ (rv['PLATFORMSDKDIR'], t) = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE,
+ r'Software\Microsoft\MicrosoftSDK\Directories\Install Dir')
+ except SCons.Util.RegError:
+ pass
+
+ if rv.has_key('PLATFORMSDKDIR'):
+ # if we have a platform SDK, try and get some info on it.
+ vers = {}
+ try:
+ loc = r'Software\Microsoft\MicrosoftSDK\InstalledSDKs'
+ k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,loc)
+ i = 0
+ while 1:
+ try:
+ key = SCons.Util.RegEnumKey(k,i)
+ sdk = SCons.Util.RegOpenKeyEx(k,key)
+ j = 0
+ name = ''
+ date = ''
+ version = ''
+ while 1:
+ try:
+ (vk,vv,t) = SCons.Util.RegEnumValue(sdk,j)
+ # TODO(1.5):
+ #if vk.lower() == 'keyword':
+ # name = vv
+ #if vk.lower() == 'propagation_date':
+ # date = vv
+ #if vk.lower() == 'version':
+ # version = vv
+ if string.lower(vk) == 'keyword':
+ name = vv
+ if string.lower(vk) == 'propagation_date':
+ date = vv
+ if string.lower(vk) == 'version':
+ version = vv
+ j = j + 1
+ except SCons.Util.RegError:
+ break
+ if name:
+ vers[name] = (date, version)
+ i = i + 1
+ except SCons.Util.RegError:
+ break
+ rv['PLATFORMSDK_MODULES'] = vers
+ except SCons.Util.RegError:
+ pass
+
+ return rv
+
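The dictionary built above is easiest to read by example. A hedged sketch of what it can contain for Visual Studio 8.0 (keys appear only when the corresponding product is found in the registry; the printed paths are illustrative):

    import SCons.Tool.msvs

    dirs = SCons.Tool.msvs.get_msvs_install_dirs('8.0')
    # possible keys: VCINSTALLDIR, VSINSTALLDIR, FRAMEWORKDIR, FRAMEWORKVERSIONS,
    # FRAMEWORKVERSION, FRAMEWORKSDKDIR, PLATFORMSDKDIR, PLATFORMSDK_MODULES
    print dirs.get('VCINSTALLDIR')      # e.g. C:\Program Files\Microsoft Visual Studio 8\VC
    print dirs.get('FRAMEWORKVERSION')  # e.g. v2.0.50727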
+def GetMSVSProjectSuffix(target, source, env, for_signature):
+ return env['MSVS']['PROJECTSUFFIX']
+
+def GetMSVSSolutionSuffix(target, source, env, for_signature):
+ return env['MSVS']['SOLUTIONSUFFIX']
+
+def GenerateProject(target, source, env):
+ # generate the dsp file, according to the version of MSVS.
+ builddspfile = target[0]
+ dspfile = builddspfile.srcnode()
+
+ # this detects whether or not we're using a VariantDir
+ if not dspfile is builddspfile:
+ try:
+ bdsp = open(str(builddspfile), "w+")
+ except IOError, detail:
+ print 'Unable to open "' + str(dspfile) + '" for writing:',detail,'\n'
+ raise
+
+ bdsp.write("This is just a placeholder file.\nThe real project file is here:\n%s\n" % dspfile.get_abspath())
+
+ GenerateDSP(dspfile, source, env)
+
+ if env.get('auto_build_solution', 1):
+ builddswfile = target[1]
+ dswfile = builddswfile.srcnode()
+
+ if not dswfile is builddswfile:
+
+ try:
+ bdsw = open(str(builddswfile), "w+")
+ except IOError, detail:
+                print 'Unable to open "' + str(dswfile) + '" for writing:',detail,'\n'
+ raise
+
+ bdsw.write("This is just a placeholder file.\nThe real workspace file is here:\n%s\n" % dswfile.get_abspath())
+
+ GenerateDSW(dswfile, source, env)
+
+def GenerateSolution(target, source, env):
+ GenerateDSW(target[0], source, env)
+
+def projectEmitter(target, source, env):
+ """Sets up the DSP dependencies."""
+
+ # todo: Not sure what sets source to what user has passed as target,
+ # but this is what happens. When that is fixed, we also won't have
+ # to make the user always append env['MSVSPROJECTSUFFIX'] to target.
+ if source[0] == target[0]:
+ source = []
+
+ # make sure the suffix is correct for the version of MSVS we're running.
+ (base, suff) = SCons.Util.splitext(str(target[0]))
+ suff = env.subst('$MSVSPROJECTSUFFIX')
+ target[0] = base + suff
+
+ if not source:
+ source = 'prj_inputs:'
+ source = source + env.subst('$MSVSSCONSCOM', 1)
+ source = source + env.subst('$MSVSENCODING', 1)
+
+ if env.has_key('buildtarget') and env['buildtarget'] != None:
+ if SCons.Util.is_String(env['buildtarget']):
+ source = source + ' "%s"' % env['buildtarget']
+ elif SCons.Util.is_List(env['buildtarget']):
+ for bt in env['buildtarget']:
+ if SCons.Util.is_String(bt):
+ source = source + ' "%s"' % bt
+ else:
+ try: source = source + ' "%s"' % bt.get_abspath()
+ except AttributeError: raise SCons.Errors.InternalError, \
+ "buildtarget can be a string, a node, a list of strings or nodes, or None"
+ else:
+ try: source = source + ' "%s"' % env['buildtarget'].get_abspath()
+ except AttributeError: raise SCons.Errors.InternalError, \
+ "buildtarget can be a string, a node, a list of strings or nodes, or None"
+
+ if env.has_key('outdir') and env['outdir'] != None:
+ if SCons.Util.is_String(env['outdir']):
+ source = source + ' "%s"' % env['outdir']
+ elif SCons.Util.is_List(env['outdir']):
+ for s in env['outdir']:
+ if SCons.Util.is_String(s):
+ source = source + ' "%s"' % s
+ else:
+ try: source = source + ' "%s"' % s.get_abspath()
+ except AttributeError: raise SCons.Errors.InternalError, \
+ "outdir can be a string, a node, a list of strings or nodes, or None"
+ else:
+ try: source = source + ' "%s"' % env['outdir'].get_abspath()
+ except AttributeError: raise SCons.Errors.InternalError, \
+ "outdir can be a string, a node, a list of strings or nodes, or None"
+
+ if env.has_key('name'):
+ if SCons.Util.is_String(env['name']):
+ source = source + ' "%s"' % env['name']
+ else:
+ raise SCons.Errors.InternalError, "name must be a string"
+
+ if env.has_key('variant'):
+ if SCons.Util.is_String(env['variant']):
+ source = source + ' "%s"' % env['variant']
+ elif SCons.Util.is_List(env['variant']):
+ for variant in env['variant']:
+ if SCons.Util.is_String(variant):
+ source = source + ' "%s"' % variant
+ else:
+ raise SCons.Errors.InternalError, "name must be a string or a list of strings"
+ else:
+ raise SCons.Errors.InternalError, "variant must be a string or a list of strings"
+ else:
+ raise SCons.Errors.InternalError, "variant must be specified"
+
+ for s in _DSPGenerator.srcargs:
+ if env.has_key(s):
+ if SCons.Util.is_String(env[s]):
+                source = source + ' "%s"' % env[s]
+ elif SCons.Util.is_List(env[s]):
+ for t in env[s]:
+ if SCons.Util.is_String(t):
+ source = source + ' "%s"' % t
+ else:
+ raise SCons.Errors.InternalError, s + " must be a string or a list of strings"
+ else:
+ raise SCons.Errors.InternalError, s + " must be a string or a list of strings"
+
+ source = source + ' "%s"' % str(target[0])
+ source = [SCons.Node.Python.Value(source)]
+
+ targetlist = [target[0]]
+ sourcelist = source
+
+ if env.get('auto_build_solution', 1):
+ env['projects'] = targetlist
+ t, s = solutionEmitter(target, target, env)
+ targetlist = targetlist + t
+
+ return (targetlist, sourcelist)
+
+def solutionEmitter(target, source, env):
+ """Sets up the DSW dependencies."""
+
+ # todo: Not sure what sets source to what user has passed as target,
+ # but this is what happens. When that is fixed, we also won't have
+ # to make the user always append env['MSVSSOLUTIONSUFFIX'] to target.
+ if source[0] == target[0]:
+ source = []
+
+ # make sure the suffix is correct for the version of MSVS we're running.
+ (base, suff) = SCons.Util.splitext(str(target[0]))
+ suff = env.subst('$MSVSSOLUTIONSUFFIX')
+ target[0] = base + suff
+
+ if not source:
+ source = 'sln_inputs:'
+
+ if env.has_key('name'):
+ if SCons.Util.is_String(env['name']):
+ source = source + ' "%s"' % env['name']
+ else:
+ raise SCons.Errors.InternalError, "name must be a string"
+
+ if env.has_key('variant'):
+ if SCons.Util.is_String(env['variant']):
+ source = source + ' "%s"' % env['variant']
+ elif SCons.Util.is_List(env['variant']):
+ for variant in env['variant']:
+ if SCons.Util.is_String(variant):
+ source = source + ' "%s"' % variant
+ else:
+ raise SCons.Errors.InternalError, "name must be a string or a list of strings"
+ else:
+ raise SCons.Errors.InternalError, "variant must be a string or a list of strings"
+ else:
+ raise SCons.Errors.InternalError, "variant must be specified"
+
+ if env.has_key('slnguid'):
+ if SCons.Util.is_String(env['slnguid']):
+ source = source + ' "%s"' % env['slnguid']
+ else:
+ raise SCons.Errors.InternalError, "slnguid must be a string"
+
+ if env.has_key('projects'):
+ if SCons.Util.is_String(env['projects']):
+ source = source + ' "%s"' % env['projects']
+ elif SCons.Util.is_List(env['projects']):
+ for t in env['projects']:
+ if SCons.Util.is_String(t):
+ source = source + ' "%s"' % t
+
+ source = source + ' "%s"' % str(target[0])
+ source = [SCons.Node.Python.Value(source)]
+
+ return ([target[0]], source)
+
+projectAction = SCons.Action.Action(GenerateProject, None)
+
+solutionAction = SCons.Action.Action(GenerateSolution, None)
+
+projectBuilder = SCons.Builder.Builder(action = '$MSVSPROJECTCOM',
+ suffix = '$MSVSPROJECTSUFFIX',
+ emitter = projectEmitter)
+
+solutionBuilder = SCons.Builder.Builder(action = '$MSVSSOLUTIONCOM',
+ suffix = '$MSVSSOLUTIONSUFFIX',
+ emitter = solutionEmitter)
+
+default_MSVS_SConscript = None
+
+def generate(env):
+ """Add Builders and construction variables for Microsoft Visual
+ Studio project files to an Environment."""
+ try:
+ env['BUILDERS']['MSVSProject']
+ except KeyError:
+ env['BUILDERS']['MSVSProject'] = projectBuilder
+
+ try:
+ env['BUILDERS']['MSVSSolution']
+ except KeyError:
+ env['BUILDERS']['MSVSSolution'] = solutionBuilder
+
+ env['MSVSPROJECTCOM'] = projectAction
+ env['MSVSSOLUTIONCOM'] = solutionAction
+
+ if SCons.Script.call_stack:
+ # XXX Need to find a way to abstract this; the build engine
+ # shouldn't depend on anything in SCons.Script.
+ env['MSVSSCONSCRIPT'] = SCons.Script.call_stack[0].sconscript
+ else:
+ global default_MSVS_SConscript
+ if default_MSVS_SConscript is None:
+ default_MSVS_SConscript = env.File('SConstruct')
+ env['MSVSSCONSCRIPT'] = default_MSVS_SConscript
+
+ env['MSVSSCONS'] = '"%s" -c "%s"' % (python_executable, getExecScriptMain(env))
+ env['MSVSSCONSFLAGS'] = '-C "${MSVSSCONSCRIPT.dir.abspath}" -f ${MSVSSCONSCRIPT.name}'
+ env['MSVSSCONSCOM'] = '$MSVSSCONS $MSVSSCONSFLAGS'
+ env['MSVSBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
+ env['MSVSREBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
+ env['MSVSCLEANCOM'] = '$MSVSSCONSCOM -c "$MSVSBUILDTARGET"'
+ env['MSVSENCODING'] = 'Windows-1252'
+
+ try:
+ version = get_default_visualstudio_version(env)
+ # keep a record of some of the MSVS info so the user can use it.
+ dirs = get_msvs_install_dirs(version)
+ env['MSVS'].update(dirs)
+ except (SCons.Util.RegError, SCons.Errors.InternalError):
+ # we don't care if we can't do this -- if we can't, it's
+ # because we don't have access to the registry, or because the
+ # tools aren't installed. In either case, the user will have to
+ # find them on their own.
+ pass
+
+ version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
+ if (version_num < 7.0):
+ env['MSVS']['PROJECTSUFFIX'] = '.dsp'
+ env['MSVS']['SOLUTIONSUFFIX'] = '.dsw'
+ else:
+ env['MSVS']['PROJECTSUFFIX'] = '.vcproj'
+ env['MSVS']['SOLUTIONSUFFIX'] = '.sln'
+
+ env['GET_MSVSPROJECTSUFFIX'] = GetMSVSProjectSuffix
+ env['GET_MSVSSOLUTIONSUFFIX'] = GetMSVSSolutionSuffix
+ env['MSVSPROJECTSUFFIX'] = '${GET_MSVSPROJECTSUFFIX}'
+ env['MSVSSOLUTIONSUFFIX'] = '${GET_MSVSSOLUTIONSUFFIX}'
+ env['SCONS_HOME'] = os.environ.get('SCONS_HOME')
+
+def exists(env):
+ if not env['PLATFORM'] in ('win32', 'cygwin'):
+ return 0
+
+    v = None
+    try:
+        v = SCons.Tool.msvs.get_visualstudio_versions()
+    except (SCons.Util.RegError, SCons.Errors.InternalError):
+        pass
+
+ if not v:
+ version_num = 6.0
+ if env.has_key('MSVS_VERSION'):
+ version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
+ if version_num >= 7.0:
+ # The executable is 'devenv' in Visual Studio Pro,
+ # Team System and others. Express Editions have different
+ # executable names. Right now we're only going to worry
+ # about Visual C++ 2005 Express Edition.
+ return env.Detect('devenv') or env.Detect('vcexpress')
+ else:
+ return env.Detect('msdev')
+ else:
+ # there's at least one version of MSVS installed.
+ return 1
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/mwcc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/mwcc.py
new file mode 100644
index 000000000..4a6eaaaf1
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/mwcc.py
@@ -0,0 +1,202 @@
+"""SCons.Tool.mwcc
+
+Tool-specific initialization for the Metrowerks CodeWarrior compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/mwcc.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+import string
+
+import SCons.Util
+
+def set_vars(env):
+ """Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars
+
+ MWCW_VERSIONS is set to a list of objects representing installed versions
+
+ MWCW_VERSION is set to the version object that will be used for building.
+ MWCW_VERSION can be set to a string during Environment
+ construction to influence which version is chosen, otherwise
+ the latest one from MWCW_VERSIONS is used.
+
+ Returns true if at least one version is found, false otherwise
+ """
+ desired = env.get('MWCW_VERSION', '')
+
+ # return right away if the variables are already set
+ if isinstance(desired, MWVersion):
+ return 1
+ elif desired is None:
+ return 0
+
+ versions = find_versions()
+ version = None
+
+ if desired:
+ for v in versions:
+ if str(v) == desired:
+ version = v
+ elif versions:
+ version = versions[-1]
+
+ env['MWCW_VERSIONS'] = versions
+ env['MWCW_VERSION'] = version
+
+ if version is None:
+ return 0
+
+ env.PrependENVPath('PATH', version.clpath)
+ env.PrependENVPath('PATH', version.dllpath)
+ ENV = env['ENV']
+ ENV['CWFolder'] = version.path
+ ENV['LM_LICENSE_FILE'] = version.license
+ plus = lambda x: '+%s' % x
+ ENV['MWCIncludes'] = string.join(map(plus, version.includes), os.pathsep)
+ ENV['MWLibraries'] = string.join(map(plus, version.libs), os.pathsep)
+ return 1
+
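As the docstring above notes, a specific CodeWarrior installation can be selected by passing MWCW_VERSION when the Environment is constructed. A hedged sketch ('9.0' is a hypothetical version string that must match one of the detected installations):

    import SCons.Environment

    env = SCons.Environment.Environment(tools=['mwcc', 'mwld'], MWCW_VERSION='9.0')
    print env['MWCW_VERSIONS']   # MWVersion objects found in the registry
    print env['MWCW_VERSION']    # the version selected for this build, or None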
+
+def find_versions():
+ """Return a list of MWVersion objects representing installed versions"""
+ versions = []
+
+ ### This function finds CodeWarrior by reading from the registry on
+ ### Windows. Some other method needs to be implemented for other
+ ### platforms, maybe something that calls env.WhereIs('mwcc')
+
+ if SCons.Util.can_read_reg:
+ try:
+ HLM = SCons.Util.HKEY_LOCAL_MACHINE
+ product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions'
+ product_key = SCons.Util.RegOpenKeyEx(HLM, product)
+
+ i = 0
+ while 1:
+ name = product + '\\' + SCons.Util.RegEnumKey(product_key, i)
+ name_key = SCons.Util.RegOpenKeyEx(HLM, name)
+
+ try:
+ version = SCons.Util.RegQueryValueEx(name_key, 'VERSION')
+ path = SCons.Util.RegQueryValueEx(name_key, 'PATH')
+ mwv = MWVersion(version[0], path[0], 'Win32-X86')
+ versions.append(mwv)
+ except SCons.Util.RegError:
+ pass
+
+ i = i + 1
+
+ except SCons.Util.RegError:
+ pass
+
+ return versions
+
+
+class MWVersion:
+ def __init__(self, version, path, platform):
+ self.version = version
+ self.path = path
+ self.platform = platform
+ self.clpath = os.path.join(path, 'Other Metrowerks Tools',
+ 'Command Line Tools')
+ self.dllpath = os.path.join(path, 'Bin')
+
+ # The Metrowerks tools don't store any configuration data so they
+ # are totally dumb when it comes to locating standard headers,
+ # libraries, and other files, expecting all the information
+ # to be handed to them in environment variables. The members set
+ # below control what information scons injects into the environment
+
+ ### The paths below give a normal build environment in CodeWarrior for
+ ### Windows, other versions of CodeWarrior might need different paths.
+
+ msl = os.path.join(path, 'MSL')
+ support = os.path.join(path, '%s Support' % platform)
+
+ self.license = os.path.join(path, 'license.dat')
+ self.includes = [msl, support]
+ self.libs = [msl, support]
+
+ def __str__(self):
+ return self.version
+
+
+CSuffixes = ['.c', '.C']
+CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++']
+
+
+def generate(env):
+ """Add Builders and construction variables for the mwcc to an Environment."""
+ import SCons.Defaults
+ import SCons.Tool
+
+ set_vars(env)
+
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ for suffix in CSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.CAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
+
+ for suffix in CXXSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.CXXAction)
+ shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
+
+ env['CCCOMFLAGS'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES'
+
+ env['CC'] = 'mwcc'
+ env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS'
+
+ env['CXX'] = 'mwcc'
+ env['CXXCOM'] = '$CXX $CXXFLAGS $CCCOMFLAGS'
+
+ env['SHCC'] = '$CC'
+ env['SHCCFLAGS'] = '$CCFLAGS'
+ env['SHCFLAGS'] = '$CFLAGS'
+ env['SHCCCOM'] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS'
+
+ env['SHCXX'] = '$CXX'
+ env['SHCXXFLAGS'] = '$CXXFLAGS'
+ env['SHCXXCOM'] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS'
+
+ env['CFILESUFFIX'] = '.c'
+ env['CXXFILESUFFIX'] = '.cpp'
+ env['CPPDEFPREFIX'] = '-D'
+ env['CPPDEFSUFFIX'] = ''
+ env['INCPREFIX'] = '-I'
+ env['INCSUFFIX'] = ''
+
+ #env['PCH'] = ?
+ #env['PCHSTOP'] = ?
+
+
+def exists(env):
+ return set_vars(env)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/mwld.py b/tools/scons/scons-local-1.2.0/SCons/Tool/mwld.py
new file mode 100644
index 000000000..890b6d09f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/mwld.py
@@ -0,0 +1,101 @@
+"""SCons.Tool.mwld
+
+Tool-specific initialization for the Metrowerks CodeWarrior linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/mwld.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Errors
+import SCons.Tool
+
+
+def generate(env):
+ """Add Builders and construction variables for lib to an Environment."""
+ SCons.Tool.createStaticLibBuilder(env)
+ SCons.Tool.createSharedLibBuilder(env)
+ SCons.Tool.createProgBuilder(env)
+
+ env['AR'] = 'mwld'
+ env['ARCOM'] = '$AR $ARFLAGS -library -o $TARGET $SOURCES'
+
+ env['LIBDIRPREFIX'] = '-L'
+ env['LIBDIRSUFFIX'] = ''
+ env['LIBLINKPREFIX'] = '-l'
+ env['LIBLINKSUFFIX'] = '.lib'
+
+ env['LINK'] = 'mwld'
+ env['LINKCOM'] = '$LINK $LINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
+
+ env['SHLINK'] = '$LINK'
+ env['SHLINKFLAGS'] = '$LINKFLAGS'
+ env['SHLINKCOM'] = shlib_action
+ env['SHLIBEMITTER']= shlib_emitter
+
+
+def exists(env):
+ import SCons.Tool.mwcc
+ return SCons.Tool.mwcc.set_vars(env)
+
+
+def shlib_generator(target, source, env, for_signature):
+ cmd = ['$SHLINK', '$SHLINKFLAGS', '-shared']
+
+ no_import_lib = env.get('no_import_lib', 0)
+    if no_import_lib: cmd.append('-noimplib')
+
+ dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
+ if dll: cmd.extend(['-o', dll])
+
+ implib = env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX')
+ if implib: cmd.extend(['-implib', implib.get_string(for_signature)])
+
+ cmd.extend(['$SOURCES', '$_LIBDIRFLAGS', '$_LIBFLAGS'])
+
+ return [cmd]
+
+
+def shlib_emitter(target, source, env):
+ dll = env.FindIxes(target, 'SHLIBPREFIX', 'SHLIBSUFFIX')
+ no_import_lib = env.get('no_import_lib', 0)
+
+ if not dll:
+ raise SCons.Errors.UserError, "A shared library should have exactly one target with the suffix: %s" % env.subst("$SHLIBSUFFIX")
+
+ if not no_import_lib and \
+ not env.FindIxes(target, 'LIBPREFIX', 'LIBSUFFIX'):
+
+ # Append an import library to the list of targets.
+ target.append(env.ReplaceIxes(dll,
+ 'SHLIBPREFIX', 'SHLIBSUFFIX',
+ 'LIBPREFIX', 'LIBSUFFIX'))
+
+ return target, source
+
+
+shlib_action = SCons.Action.Action(shlib_generator, generator=1)
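A hedged SConscript sketch of how this linker tool is typically exercised once mwcc/mwld are loaded (hello.c is a placeholder source file):

    env = Environment(tools=['mwcc', 'mwld'])
    env.SharedLibrary('hello', ['hello.c'])   # emits the DLL plus an import library
    # pass no_import_lib=1 to suppress the import library; shlib_generator and
    # shlib_emitter above both honour that flag.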
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/nasm.py b/tools/scons/scons-local-1.2.0/SCons/Tool/nasm.py
new file mode 100644
index 000000000..db5c1075d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/nasm.py
@@ -0,0 +1,66 @@
+"""SCons.Tool.nasm
+
+Tool-specific initialization for nasm, the famous Netwide Assembler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/nasm.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+ASSuffixes = ['.s', '.asm', '.ASM']
+ASPPSuffixes = ['.spp', '.SPP', '.sx']
+if SCons.Util.case_sensitive_suffixes('.s', '.S'):
+ ASPPSuffixes.extend(['.S'])
+else:
+ ASSuffixes.extend(['.S'])
+
+def generate(env):
+ """Add Builders and construction variables for nasm to an Environment."""
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+
+ for suffix in ASSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.ASAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+
+ for suffix in ASPPSuffixes:
+ static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
+ static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
+
+ env['AS'] = 'nasm'
+ env['ASFLAGS'] = SCons.Util.CLVar('')
+ env['ASPPFLAGS'] = '$ASFLAGS'
+ env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES'
+ env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
+
+def exists(env):
+ return env.Detect('nasm')
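A minimal SConscript sketch for this tool; nasm must be on PATH, loader.asm and main.c are placeholder sources, and '-f elf' is just an example output-format flag:

    env = Environment(tools=['gcc', 'gnulink', 'nasm'], ASFLAGS='-f elf')
    env.Program('demo', ['main.c', 'loader.asm'])   # .asm sources are assembled via $ASCOM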
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/__init__.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/__init__.py
new file mode 100644
index 000000000..f5e313cf0
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/__init__.py
@@ -0,0 +1,306 @@
+"""SCons.Tool.Packaging
+
+SCons Packaging Tool.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Environment
+from SCons.Variables import *
+from SCons.Errors import *
+from SCons.Util import is_List, make_path_relative
+from SCons.Warnings import warn, Warning
+
+import os, imp, string
+import SCons.Defaults
+
+__all__ = [ 'src_targz', 'src_tarbz2', 'src_zip', 'tarbz2', 'targz', 'zip', 'rpm', 'msi', 'ipk' ]
+
+#
+# Utility and Builder function
+#
+def Tag(env, target, source, *more_tags, **kw_tags):
+ """ Tag a file with the given arguments, just sets the accordingly named
+ attribute on the file object.
+
+ TODO: FIXME
+ """
+ if not target:
+ target=source
+ first_tag=None
+ else:
+ first_tag=source
+
+ if first_tag:
+ kw_tags[first_tag[0]] = ''
+
+ if len(kw_tags) == 0 and len(more_tags) == 0:
+ raise UserError, "No tags given."
+
+ # XXX: sanity checks
+ for x in more_tags:
+ kw_tags[x] = ''
+
+ if not SCons.Util.is_List(target):
+ target=[target]
+ else:
+        # hmm, sometimes the target list is a list of lists;
+ # make sure it is flattened prior to processing.
+ # TODO: perhaps some bug ?!?
+ target=env.Flatten(target)
+
+ for t in target:
+ for (k,v) in kw_tags.items():
+ # all file tags have to start with PACKAGING_, so we can later
+ # differentiate between "normal" object attributes and the
+ # packaging attributes. As the user should not be bothered with
+ # that, the prefix will be added here if missing.
+ #if not k.startswith('PACKAGING_'):
+ if k[:10] != 'PACKAGING_':
+ k='PACKAGING_'+k
+ setattr(t, k, v)
+
+def Package(env, target=None, source=None, **kw):
+ """ Entry point for the package tool.
+ """
+    # check if we need to find the source files ourselves
+ if not source:
+ source = env.FindInstalledFiles()
+
+ if len(source)==0:
+ raise UserError, "No source for Package() given"
+
+    # decide which types of packages shall be built. This can be defined through
+    # four mechanisms: command line argument, keyword argument,
+    # environment argument and default selection (zip or tar.gz), in that
+    # order.
+ try: kw['PACKAGETYPE']=env['PACKAGETYPE']
+ except KeyError: pass
+
+ if not kw.get('PACKAGETYPE'):
+ from SCons.Script import GetOption
+ kw['PACKAGETYPE'] = GetOption('package_type')
+
+ if kw['PACKAGETYPE'] == None:
+ if env['BUILDERS'].has_key('Tar'):
+ kw['PACKAGETYPE']='targz'
+ elif env['BUILDERS'].has_key('Zip'):
+ kw['PACKAGETYPE']='zip'
+ else:
+ raise UserError, "No type for Package() given"
+
+ PACKAGETYPE=kw['PACKAGETYPE']
+ if not is_List(PACKAGETYPE):
+ PACKAGETYPE=string.split(PACKAGETYPE, ',')
+
+ # load the needed packagers.
+ def load_packager(type):
+ try:
+ file,path,desc=imp.find_module(type, __path__)
+ return imp.load_module(type, file, path, desc)
+ except ImportError, e:
+ raise EnvironmentError("packager %s not available: %s"%(type,str(e)))
+
+ packagers=map(load_packager, PACKAGETYPE)
+
+ # set up targets and the PACKAGEROOT
+ try:
+ # fill up the target list with a default target name until the PACKAGETYPE
+ # list is of the same size as the target list.
+ if not target: target = []
+
+ size_diff = len(PACKAGETYPE)-len(target)
+ default_name = "%(NAME)s-%(VERSION)s"
+
+ if size_diff>0:
+ default_target = default_name%kw
+ target.extend( [default_target]*size_diff )
+
+ if not kw.has_key('PACKAGEROOT'):
+ kw['PACKAGEROOT'] = default_name%kw
+
+ except KeyError, e:
+ raise SCons.Errors.UserError( "Missing Packagetag '%s'"%e.args[0] )
+
+ # setup the source files
+ source=env.arg2nodes(source, env.fs.Entry)
+
+ # call the packager to setup the dependencies.
+ targets=[]
+ try:
+ for packager in packagers:
+ t=[target.pop(0)]
+ t=apply(packager.package, [env,t,source], kw)
+ targets.extend(t)
+
+ assert( len(target) == 0 )
+
+ except KeyError, e:
+ raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\
+ % (e.args[0],packager.__name__) )
+ except TypeError, e:
+ # this exception means that a needed argument for the packager is
+ # missing. As our packagers get their "tags" as named function
+ # arguments we need to find out which one is missing.
+ from inspect import getargspec
+ args,varargs,varkw,defaults=getargspec(packager.package)
+ if defaults!=None:
+ args=args[:-len(defaults)] # throw away arguments with default values
+ map(args.remove, 'env target source'.split())
+ # now remove any args for which we have a value in kw.
+ #args=[x for x in args if not kw.has_key(x)]
+ args=filter(lambda x, kw=kw: not kw.has_key(x), args)
+
+ if len(args)==0:
+ raise # must be a different error, so reraise
+ elif len(args)==1:
+ raise SCons.Errors.UserError( "Missing Packagetag '%s' for %s packager"\
+ % (args[0],packager.__name__) )
+ else:
+ raise SCons.Errors.UserError( "Missing Packagetags '%s' for %s packager"\
+ % (", ".join(args),packager.__name__) )
+
+ target=env.arg2nodes(target, env.fs.Entry)
+ targets.extend(env.Alias( 'package', targets ))
+ return targets
+
+#
+# SCons tool initialization functions
+#
+
+added = None
+
+def generate(env):
+ from SCons.Script import AddOption
+ global added
+ if not added:
+ added = 1
+ AddOption('--package-type',
+ dest='package_type',
+ default=None,
+ type="string",
+ action="store",
+ help='The type of package to create.')
+
+ try:
+ env['BUILDERS']['Package']
+ env['BUILDERS']['Tag']
+ except KeyError:
+ env['BUILDERS']['Package'] = Package
+ env['BUILDERS']['Tag'] = Tag
+
+def exists(env):
+ return 1
+
+# XXX
+def options(opts):
+ opts.AddVariables(
+ EnumVariable( 'PACKAGETYPE',
+ 'the type of package to create.',
+ None, allowed_values=map( str, __all__ ),
+ ignorecase=2
+ )
+ )
+
+#
+# Internal utility functions
+#
+
+def copy_attr(f1, f2):
+ """ copies the special packaging file attributes from f1 to f2.
+ """
+ #pattrs = [x for x in dir(f1) if not hasattr(f2, x) and\
+ # x.startswith('PACKAGING_')]
+ copyit = lambda x, f2=f2: not hasattr(f2, x) and x[:10] == 'PACKAGING_'
+ pattrs = filter(copyit, dir(f1))
+ for attr in pattrs:
+ setattr(f2, attr, getattr(f1, attr))
+
+def putintopackageroot(target, source, env, pkgroot, honor_install_location=1):
+ """ Uses the CopyAs builder to copy all source files to the directory given
+ in pkgroot.
+
+    If honor_install_location is set and the copied source file has a
+    PACKAGING_INSTALL_LOCATION attribute, the PACKAGING_INSTALL_LOCATION is
+    used as the new name of the source file under pkgroot.
+
+    The source file will not be copied if it is already under the pkgroot
+ directory.
+
+ All attributes of the source file will be copied to the new file.
+ """
+ # make sure the packageroot is a Dir object.
+ if SCons.Util.is_String(pkgroot): pkgroot=env.Dir(pkgroot)
+ if not SCons.Util.is_List(source): source=[source]
+
+ new_source = []
+ for file in source:
+ if SCons.Util.is_String(file): file = env.File(file)
+
+ if file.is_under(pkgroot):
+ new_source.append(file)
+ else:
+ if hasattr(file, 'PACKAGING_INSTALL_LOCATION') and\
+ honor_install_location:
+ new_name=make_path_relative(file.PACKAGING_INSTALL_LOCATION)
+ else:
+ new_name=make_path_relative(file.get_path())
+
+ new_file=pkgroot.File(new_name)
+ new_file=env.CopyAs(new_file, file)[0]
+ copy_attr(file, new_file)
+ new_source.append(new_file)
+
+ return (target, new_source)
+
+def stripinstallbuilder(target, source, env):
+ """ strips the install builder action from the source list and stores
+    the final installation location as the "PACKAGING_INSTALL_LOCATION" attribute
+    of the source of each source file. This effectively removes the final installed
+ files from the source list while remembering the installation location.
+
+ It also warns about files which have no install builder attached.
+ """
+ def has_no_install_location(file):
+ return not (file.has_builder() and\
+ hasattr(file.builder, 'name') and\
+ (file.builder.name=="InstallBuilder" or\
+ file.builder.name=="InstallAsBuilder"))
+
+ if len(filter(has_no_install_location, source)):
+        warn(Warning, "there are files to package which have no "
+             "InstallBuilder attached; this might lead to irreproducible packages")
+
+ n_source=[]
+ for s in source:
+ if has_no_install_location(s):
+ n_source.append(s)
+ else:
+ for ss in s.sources:
+ n_source.append(ss)
+ copy_attr(s, ss)
+ setattr(ss, 'PACKAGING_INSTALL_LOCATION', s.get_path())
+
+ return (target, n_source)
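For orientation, the Package() and Tag() entry points above are normally driven from a project's SConstruct. A minimal sketch, assuming an invented program name and tag values (the explicit None source argument to Tag() matches the signature above):

    # Illustrative SConstruct fragment; names and values are hypothetical.
    env = Environment(tools=['default', 'packaging'])

    prog = env.Program('hello', ['hello.c'])
    installed = env.Install('/usr/bin', prog)

    # Tag() sets PACKAGING_* attributes on the installed nodes; the prefix
    # is added automatically when it is missing.
    env.Tag(installed, None, UNIX_ATTR='0755')

    # With no source given, Package() falls back to FindInstalledFiles();
    # PACKAGETYPE may name several packagers separated by commas.
    env.Package(NAME='hello',
                VERSION='1.0',
                PACKAGETYPE='targz,zip')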
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/ipk.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/ipk.py
new file mode 100644
index 000000000..0a9c6b9da
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/ipk.py
@@ -0,0 +1,179 @@
+"""SCons.Tool.Packaging.ipk
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/ipk.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Builder
+import SCons.Node.FS
+import os
+
+from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot
+
+def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION,
+ SUMMARY, X_IPK_PRIORITY, X_IPK_SECTION, SOURCE_URL,
+ X_IPK_MAINTAINER, X_IPK_DEPENDS, **kw):
+ """ this function prepares the packageroot directory for packaging with the
+ ipkg builder.
+ """
+ SCons.Tool.Tool('ipkg').generate(env)
+
+ # setup the Ipkg builder
+ bld = env['BUILDERS']['Ipkg']
+ target, source = stripinstallbuilder(target, source, env)
+ target, source = putintopackageroot(target, source, env, PACKAGEROOT)
+
+ # This should be overridable from the construction environment,
+ # which it is by using ARCHITECTURE=.
+ # Guessing based on what os.uname() returns at least allows it
+ # to work for both i386 and x86_64 Linux systems.
+ archmap = {
+ 'i686' : 'i386',
+ 'i586' : 'i386',
+ 'i486' : 'i386',
+ }
+
+ buildarchitecture = os.uname()[4]
+ buildarchitecture = archmap.get(buildarchitecture, buildarchitecture)
+
+ if kw.has_key('ARCHITECTURE'):
+ buildarchitecture = kw['ARCHITECTURE']
+
+    # set up the kw to contain the mandatory arguments to this function.
+ # do this before calling any builder or setup function
+ loc=locals()
+ del loc['kw']
+ kw.update(loc)
+ del kw['source'], kw['target'], kw['env']
+
+ # generate the specfile
+ specfile = gen_ipk_dir(PACKAGEROOT, source, env, kw)
+
+ # override the default target.
+ if str(target[0])=="%s-%s"%(NAME, VERSION):
+ target=[ "%s_%s_%s.ipk"%(NAME, VERSION, buildarchitecture) ]
+
+ # now apply the Ipkg builder
+ return apply(bld, [env, target, specfile], kw)
+
+def gen_ipk_dir(proot, source, env, kw):
+ # make sure the packageroot is a Dir object.
+ if SCons.Util.is_String(proot): proot=env.Dir(proot)
+
+ # create the specfile builder
+ s_bld=SCons.Builder.Builder(
+ action = build_specfiles,
+ )
+
+ # create the specfile targets
+ spec_target=[]
+ control=proot.Dir('CONTROL')
+ spec_target.append(control.File('control'))
+ spec_target.append(control.File('conffiles'))
+ spec_target.append(control.File('postrm'))
+ spec_target.append(control.File('prerm'))
+ spec_target.append(control.File('postinst'))
+ spec_target.append(control.File('preinst'))
+
+ # apply the builder to the specfile targets
+ apply(s_bld, [env, spec_target, source], kw)
+
+    # the packageroot directory now contains the specfiles.
+ return proot
+
+def build_specfiles(source, target, env):
+ """ filter the targets for the needed files and use the variables in env
+ to create the specfile.
+ """
+ #
+ # At first we care for the CONTROL/control file, which is the main file for ipk.
+ #
+    # For this we need to open multiple files in random order, so we store
+    # them in a dict so they can be easily accessed.
+    #
+ opened_files={}
+ def open_file(needle, haystack):
+ try:
+ return opened_files[needle]
+ except KeyError:
+ file=filter(lambda x: x.get_path().rfind(needle)!=-1, haystack)[0]
+ opened_files[needle]=open(file.abspath, 'w')
+ return opened_files[needle]
+
+ control_file=open_file('control', target)
+
+ if not env.has_key('X_IPK_DESCRIPTION'):
+ env['X_IPK_DESCRIPTION']="%s\n %s"%(env['SUMMARY'],
+ env['DESCRIPTION'].replace('\n', '\n '))
+
+
+ content = """
+Package: $NAME
+Version: $VERSION
+Priority: $X_IPK_PRIORITY
+Section: $X_IPK_SECTION
+Source: $SOURCE_URL
+Architecture: $ARCHITECTURE
+Maintainer: $X_IPK_MAINTAINER
+Depends: $X_IPK_DEPENDS
+Description: $X_IPK_DESCRIPTION
+"""
+
+ control_file.write(env.subst(content))
+
+ #
+    # now handle the various other files, whose purpose it is to set post- and
+    # pre-scripts and to mark files as config files.
+ #
+ # We do so by filtering the source files for files which are marked with
+ # the "config" tag and afterwards we do the same for x_ipk_postrm,
+ # x_ipk_prerm, x_ipk_postinst and x_ipk_preinst tags.
+ #
+    # The first one writes the name of the file into the file
+    # CONTROL/conffiles, the latter ones add the content of the x_ipk_* variable
+    # into the file of the same name.
+ #
+ for f in [x for x in source if 'PACKAGING_CONFIG' in dir(x)]:
+        config=open_file('conffiles', target)
+ config.write(f.PACKAGING_INSTALL_LOCATION)
+ config.write('\n')
+
+    for tag in 'POSTRM PRERM POSTINST PREINST'.split():
+        name="PACKAGING_X_IPK_%s"%tag
+        for f in [x for x in source if name in dir(x)]:
+            file=open_file(name, target)
+            file.write(env[tag])
+
+ #
+ # close all opened files
+ for f in opened_files.values():
+ f.close()
+
+ # call a user specified function
+ if env.has_key('CHANGE_SPECFILE'):
+        env['CHANGE_SPECFILE'](target, source)
+
+ return 0
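As a rough usage sketch (all values invented for illustration), the keyword tags consumed by package() above map onto a Package() call along these lines; the X_IPK_* fields end up in CONTROL/control via build_specfiles():

    # Illustrative SConstruct fragment for an ipk package.
    env.Package(NAME='hello',
                VERSION='1.0',
                PACKAGETYPE='ipk',
                SUMMARY='hello world program',
                DESCRIPTION='a longer description of the hello world program',
                SOURCE_URL='http://example.org/hello-1.0.tar.gz',
                X_IPK_PRIORITY='optional',
                X_IPK_SECTION='misc',
                X_IPK_MAINTAINER='Jane Doe <jane@example.org>',
                X_IPK_DEPENDS='libc')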
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/msi.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/msi.py
new file mode 100644
index 000000000..4929f812f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/msi.py
@@ -0,0 +1,521 @@
+"""SCons.Tool.packaging.msi
+
+The msi packager.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/msi.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import SCons
+from SCons.Action import Action
+from SCons.Builder import Builder
+
+from xml.dom.minidom import *
+from xml.sax.saxutils import escape
+
+from SCons.Tool.packaging import stripinstallbuilder
+
+#
+# Utility functions
+#
+def convert_to_id(s, id_set):
+    """ Some parts of .wxs need an Id attribute (for example the File and
+    Directory directives). The charset is limited to A-Z, a-z, digits,
+    underscores, periods. Each Id must begin with a letter or with an
+    underscore. Google for "CNDL0015" for information about this.
+
+ Requirements:
+ * the string created must only contain chars from the target charset.
+ * the string created must have a minimal editing distance from the
+ original string.
+ * the string created must be unique for the whole .wxs file.
+
+ Observation:
+ * There are 62 chars in the charset.
+
+ Idea:
+      * filter out forbidden characters. Check for a collision with the help
+        of the id_set. Append the number of the collision to the end of the
+        created string. Furthermore care for a correct start of the string.
+ """
+    charset = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_.'
+    if s[0] in '0123456789.':
+        s = '_'+s
+ id = filter( lambda c : c in charset, s )
+
+ # did we already generate an id for this file?
+ try:
+ return id_set[id][s]
+ except KeyError:
+        # no, we did not, so initialize it with the id
+ if not id_set.has_key(id): id_set[id] = { s : id }
+ # there is a collision, generate an id which is unique by appending
+ # the collision number
+ else: id_set[id][s] = id + str(len(id_set[id]))
+
+ return id_set[id][s]
+
+def is_dos_short_file_name(file):
+ """ examine if the given file is in the 8.3 form.
+ """
+ fname, ext = os.path.splitext(file)
+ proper_ext = len(ext) == 0 or (2 <= len(ext) <= 4) # the ext contains the dot
+ proper_fname = file.isupper() and len(fname) <= 8
+
+ return proper_ext and proper_fname
+
+def gen_dos_short_file_name(file, filename_set):
+ """ see http://support.microsoft.com/default.aspx?scid=kb;en-us;Q142982
+
+    These are not complete 8.3 DOS short names. The ~ char is missing and
+    replaced with one character from the filename. WiX warns about such
+ filenames, since a collision might occur. Google for "CNDL1014" for
+ more information.
+ """
+ # guard this to not confuse the generation
+ if is_dos_short_file_name(file):
+ return file
+
+ fname, ext = os.path.splitext(file) # ext contains the dot
+
+ # first try if it suffices to convert to upper
+ file = file.upper()
+ if is_dos_short_file_name(file):
+ return file
+
+ # strip forbidden characters.
+ forbidden = '."/[]:;=, '
+ fname = filter( lambda c : c not in forbidden, fname )
+
+ # check if we already generated a filename with the same number:
+ # thisis1.txt, thisis2.txt etc.
+ duplicate, num = not None, 1
+ while duplicate:
+ shortname = "%s%s" % (fname[:8-len(str(num))].upper(),\
+ str(num))
+ if len(ext) >= 2:
+ shortname = "%s%s" % (shortname, ext[:4].upper())
+
+ duplicate, num = shortname in filename_set, num+1
+
+ assert( is_dos_short_file_name(shortname) ), 'shortname is %s, longname is %s' % (shortname, file)
+ filename_set.append(shortname)
+ return shortname
+
+def create_feature_dict(files):
+    """ X_MSI_FEATURE and doc FileTags can be used to collect files in a
+ hierarchy. This function collects the files into this hierarchy.
+ """
+ dict = {}
+
+ def add_to_dict( feature, file ):
+ if not SCons.Util.is_List( feature ):
+ feature = [ feature ]
+
+ for f in feature:
+ if not dict.has_key( f ):
+ dict[ f ] = [ file ]
+ else:
+ dict[ f ].append( file )
+
+ for file in files:
+ if hasattr( file, 'PACKAGING_X_MSI_FEATURE' ):
+ add_to_dict(file.PACKAGING_X_MSI_FEATURE, file)
+ elif hasattr( file, 'PACKAGING_DOC' ):
+ add_to_dict( 'PACKAGING_DOC', file )
+ else:
+ add_to_dict( 'default', file )
+
+ return dict
+
+def generate_guids(root):
+ """ generates globally unique identifiers for parts of the xml which need
+ them.
+
+ Component tags have a special requirement. Their UUID is only allowed to
+ change if the list of their contained resources has changed. This allows
+ for clean removal and proper updates.
+
+    To handle this requirement, the uuid is generated by md5-hashing the
+    whole subtree of an xml node.
+ """
+ from md5 import md5
+
+ # specify which tags need a guid and in which attribute this should be stored.
+ needs_id = { 'Product' : 'Id',
+ 'Package' : 'Id',
+ 'Component' : 'Guid',
+ }
+
+    # find all XML nodes matching the key, retrieve their attribute, hash their
+    # subtree, convert the hash to a string and add it as an attribute to the xml node.
+ for (key,value) in needs_id.items():
+ node_list = root.getElementsByTagName(key)
+ attribute = value
+ for node in node_list:
+ hash = md5(node.toxml()).hexdigest()
+ hash_str = '%s-%s-%s-%s-%s' % ( hash[:8], hash[8:12], hash[12:16], hash[16:20], hash[20:] )
+ node.attributes[attribute] = hash_str
+
+
+
+def string_wxsfile(target, source, env):
+ return "building WiX file %s"%( target[0].path )
+
+def build_wxsfile(target, source, env):
+ """ compiles a .wxs file from the keywords given in env['msi_spec'] and
+ by analyzing the tree of source nodes and their tags.
+ """
+ file = open(target[0].abspath, 'w')
+
+ try:
+ # Create a document with the Wix root tag
+ doc = Document()
+ root = doc.createElement( 'Wix' )
+ root.attributes['xmlns']='http://schemas.microsoft.com/wix/2003/01/wi'
+ doc.appendChild( root )
+
+ filename_set = [] # this is to circumvent duplicates in the shortnames
+ id_set = {} # this is to circumvent duplicates in the ids
+
+ # Create the content
+ build_wxsfile_header_section(root, env)
+ build_wxsfile_file_section(root, source, env['NAME'], env['VERSION'], env['VENDOR'], filename_set, id_set)
+ generate_guids(root)
+ build_wxsfile_features_section(root, source, env['NAME'], env['VERSION'], env['SUMMARY'], id_set)
+ build_wxsfile_default_gui(root)
+ build_license_file(target[0].get_dir(), env)
+
+ # write the xml to a file
+ file.write( doc.toprettyxml() )
+
+ # call a user specified function
+ if env.has_key('CHANGE_SPECFILE'):
+ env['CHANGE_SPECFILE'](target, source)
+
+ except KeyError, e:
+ raise SCons.Errors.UserError( '"%s" package field for MSI is missing.' % e.args[0] )
+
+#
+# setup function
+#
+def create_default_directory_layout(root, NAME, VERSION, VENDOR, filename_set):
+ """ Create the wix default target directory layout and return the innermost
+ directory.
+
+ We assume that the XML tree delivered in the root argument already contains
+ the Product tag.
+
+ Everything is put under the PFiles directory property defined by WiX.
+ After that a directory with the 'VENDOR' tag is placed and then a
+ directory with the name of the project and its VERSION. This leads to the
+ following TARGET Directory Layout:
+ C:\<PFiles>\<Vendor>\<Projectname-Version>\
+ Example: C:\Programme\Company\Product-1.2\
+ """
+ doc = Document()
+ d1 = doc.createElement( 'Directory' )
+ d1.attributes['Id'] = 'TARGETDIR'
+ d1.attributes['Name'] = 'SourceDir'
+
+ d2 = doc.createElement( 'Directory' )
+ d2.attributes['Id'] = 'ProgramFilesFolder'
+ d2.attributes['Name'] = 'PFiles'
+
+ d3 = doc.createElement( 'Directory' )
+ d3.attributes['Id'] = 'VENDOR_folder'
+ d3.attributes['Name'] = escape( gen_dos_short_file_name( VENDOR, filename_set ) )
+ d3.attributes['LongName'] = escape( VENDOR )
+
+ d4 = doc.createElement( 'Directory' )
+ project_folder = "%s-%s" % ( NAME, VERSION )
+ d4.attributes['Id'] = 'MY_DEFAULT_FOLDER'
+ d4.attributes['Name'] = escape( gen_dos_short_file_name( project_folder, filename_set ) )
+ d4.attributes['LongName'] = escape( project_folder )
+
+ d1.childNodes.append( d2 )
+ d2.childNodes.append( d3 )
+ d3.childNodes.append( d4 )
+
+ root.getElementsByTagName('Product')[0].childNodes.append( d1 )
+
+ return d4
+
+#
+# mandatory and optional file tags
+#
+def build_wxsfile_file_section(root, files, NAME, VERSION, VENDOR, filename_set, id_set):
+ """ builds the Component sections of the wxs file with their included files.
+
+    Files need to be specified in both the 8.3 and the long name format; long
+    filenames will be converted automatically.
+
+    Features are specified with the 'X_MSI_FEATURE' or 'DOC' FileTag.
+ """
+ root = create_default_directory_layout( root, NAME, VERSION, VENDOR, filename_set )
+ components = create_feature_dict( files )
+ factory = Document()
+
+ def get_directory( node, dir ):
+ """ returns the node under the given node representing the directory.
+
+        Returns the given node if dir is None or empty.
+ """
+ if dir == '' or not dir:
+ return node
+
+ Directory = node
+ dir_parts = dir.split(os.path.sep)
+
+ # to make sure that our directory ids are unique, the parent folders are
+ # consecutively added to upper_dir
+ upper_dir = ''
+
+ # walk down the xml tree finding parts of the directory
+ dir_parts = filter( lambda d: d != '', dir_parts )
+ for d in dir_parts[:]:
+ already_created = filter( lambda c: c.nodeName == 'Directory' and c.attributes['LongName'].value == escape(d), Directory.childNodes )
+
+ if already_created != []:
+ Directory = already_created[0]
+ dir_parts.remove(d)
+ upper_dir += d
+ else:
+ break
+
+ for d in dir_parts:
+ nDirectory = factory.createElement( 'Directory' )
+ nDirectory.attributes['LongName'] = escape( d )
+ nDirectory.attributes['Name'] = escape( gen_dos_short_file_name( d, filename_set ) )
+ upper_dir += d
+ nDirectory.attributes['Id'] = convert_to_id( upper_dir, id_set )
+
+ Directory.childNodes.append( nDirectory )
+ Directory = nDirectory
+
+ return Directory
+
+ for file in files:
+ drive, path = os.path.splitdrive( file.PACKAGING_INSTALL_LOCATION )
+ filename = os.path.basename( path )
+ dirname = os.path.dirname( path )
+
+ h = {
+ # tagname : default value
+ 'PACKAGING_X_MSI_VITAL' : 'yes',
+ 'PACKAGING_X_MSI_FILEID' : convert_to_id(filename, id_set),
+ 'PACKAGING_X_MSI_LONGNAME' : filename,
+ 'PACKAGING_X_MSI_SHORTNAME' : gen_dos_short_file_name(filename, filename_set),
+ 'PACKAGING_X_MSI_SOURCE' : file.get_path(),
+ }
+
+ # fill in the default tags given above.
+ for k,v in [ (k, v) for (k,v) in h.items() if not hasattr(file, k) ]:
+ setattr( file, k, v )
+
+ File = factory.createElement( 'File' )
+ File.attributes['LongName'] = escape( file.PACKAGING_X_MSI_LONGNAME )
+ File.attributes['Name'] = escape( file.PACKAGING_X_MSI_SHORTNAME )
+ File.attributes['Source'] = escape( file.PACKAGING_X_MSI_SOURCE )
+ File.attributes['Id'] = escape( file.PACKAGING_X_MSI_FILEID )
+ File.attributes['Vital'] = escape( file.PACKAGING_X_MSI_VITAL )
+
+ # create the <Component> Tag under which this file should appear
+ Component = factory.createElement('Component')
+ Component.attributes['DiskId'] = '1'
+ Component.attributes['Id'] = convert_to_id( filename, id_set )
+
+ # hang the component node under the root node and the file node
+ # under the component node.
+ Directory = get_directory( root, dirname )
+ Directory.childNodes.append( Component )
+ Component.childNodes.append( File )
+
+#
+# additional functions
+#
+def build_wxsfile_features_section(root, files, NAME, VERSION, SUMMARY, id_set):
+ """ This function creates the <features> tag based on the supplied xml tree.
+
+ This is achieved by finding all <component>s and adding them to a default target.
+
+    It should be called after the tree has been built completely. We assume
+    that a MY_DEFAULT_FOLDER Property is defined in the wxs file tree.
+
+    Furthermore a top-level Feature with the NAME and VERSION of the software will be created.
+
+    A PACKAGING_X_MSI_FEATURE can either be a string, in which case the feature
+    DESCRIPTION will be the same as its title, or a tuple, where the first
+    part will be its title and the second its DESCRIPTION.
+ """
+ factory = Document()
+ Feature = factory.createElement('Feature')
+ Feature.attributes['Id'] = 'complete'
+ Feature.attributes['ConfigurableDirectory'] = 'MY_DEFAULT_FOLDER'
+ Feature.attributes['Level'] = '1'
+ Feature.attributes['Title'] = escape( '%s %s' % (NAME, VERSION) )
+ Feature.attributes['Description'] = escape( SUMMARY )
+ Feature.attributes['Display'] = 'expand'
+
+ for (feature, files) in create_feature_dict(files).items():
+ SubFeature = factory.createElement('Feature')
+ SubFeature.attributes['Level'] = '1'
+
+ if SCons.Util.is_Tuple(feature):
+ SubFeature.attributes['Id'] = convert_to_id( feature[0], id_set )
+ SubFeature.attributes['Title'] = escape(feature[0])
+ SubFeature.attributes['Description'] = escape(feature[1])
+ else:
+ SubFeature.attributes['Id'] = convert_to_id( feature, id_set )
+ if feature=='default':
+ SubFeature.attributes['Description'] = 'Main Part'
+ SubFeature.attributes['Title'] = 'Main Part'
+ elif feature=='PACKAGING_DOC':
+ SubFeature.attributes['Description'] = 'Documentation'
+ SubFeature.attributes['Title'] = 'Documentation'
+ else:
+ SubFeature.attributes['Description'] = escape(feature)
+ SubFeature.attributes['Title'] = escape(feature)
+
+        # build the componentrefs. As one of the design decisions is that every
+        # file is also a component, we walk the list of files and create a
+        # reference.
+ for f in files:
+ ComponentRef = factory.createElement('ComponentRef')
+ ComponentRef.attributes['Id'] = convert_to_id( os.path.basename(f.get_path()), id_set )
+ SubFeature.childNodes.append(ComponentRef)
+
+ Feature.childNodes.append(SubFeature)
+
+ root.getElementsByTagName('Product')[0].childNodes.append(Feature)
+
+def build_wxsfile_default_gui(root):
+ """ this function adds a default GUI to the wxs file
+ """
+ factory = Document()
+ Product = root.getElementsByTagName('Product')[0]
+
+ UIRef = factory.createElement('UIRef')
+ UIRef.attributes['Id'] = 'WixUI_Mondo'
+ Product.childNodes.append(UIRef)
+
+ UIRef = factory.createElement('UIRef')
+ UIRef.attributes['Id'] = 'WixUI_ErrorProgressText'
+ Product.childNodes.append(UIRef)
+
+def build_license_file(directory, spec):
+ """ creates a License.rtf file with the content of "X_MSI_LICENSE_TEXT"
+ in the given directory
+ """
+ name, text = '', ''
+
+ try:
+ name = spec['LICENSE']
+ text = spec['X_MSI_LICENSE_TEXT']
+ except KeyError:
+ pass # ignore this as X_MSI_LICENSE_TEXT is optional
+
+ if name!='' or text!='':
+ file = open( os.path.join(directory.get_path(), 'License.rtf'), 'w' )
+ file.write('{\\rtf')
+ if text!='':
+ file.write(text.replace('\n', '\\par '))
+ else:
+ file.write(name+'\\par\\par')
+ file.write('}')
+ file.close()
+
+#
+# mandatory and optional package tags
+#
+def build_wxsfile_header_section(root, spec):
+    """ Adds the xml file nodes which define the package meta-data.
+ """
+ # Create the needed DOM nodes and add them at the correct position in the tree.
+ factory = Document()
+ Product = factory.createElement( 'Product' )
+ Package = factory.createElement( 'Package' )
+
+ root.childNodes.append( Product )
+ Product.childNodes.append( Package )
+
+ # set "mandatory" default values
+ if not spec.has_key('X_MSI_LANGUAGE'):
+ spec['X_MSI_LANGUAGE'] = '1033' # select english
+
+ # mandatory sections, will throw a KeyError if the tag is not available
+ Product.attributes['Name'] = escape( spec['NAME'] )
+ Product.attributes['Version'] = escape( spec['VERSION'] )
+ Product.attributes['Manufacturer'] = escape( spec['VENDOR'] )
+ Product.attributes['Language'] = escape( spec['X_MSI_LANGUAGE'] )
+ Package.attributes['Description'] = escape( spec['SUMMARY'] )
+
+    # now the optional tags, for which we avoid the KeyError exception
+ if spec.has_key( 'DESCRIPTION' ):
+ Package.attributes['Comments'] = escape( spec['DESCRIPTION'] )
+
+ if spec.has_key( 'X_MSI_UPGRADE_CODE' ):
+ Package.attributes['X_MSI_UPGRADE_CODE'] = escape( spec['X_MSI_UPGRADE_CODE'] )
+
+ # We hardcode the media tag as our current model cannot handle it.
+ Media = factory.createElement('Media')
+ Media.attributes['Id'] = '1'
+ Media.attributes['Cabinet'] = 'default.cab'
+ Media.attributes['EmbedCab'] = 'yes'
+ root.getElementsByTagName('Product')[0].childNodes.append(Media)
+
+# this builder is the entry point for the .wxs file compiler.
+wxs_builder = Builder(
+ action = Action( build_wxsfile, string_wxsfile ),
+ ensure_suffix = '.wxs' )
+
+def package(env, target, source, PACKAGEROOT, NAME, VERSION,
+ DESCRIPTION, SUMMARY, VENDOR, X_MSI_LANGUAGE, **kw):
+ # make sure that the Wix Builder is in the environment
+ SCons.Tool.Tool('wix').generate(env)
+
+    # put the keywords for the specfile compiler into kw. These are the arguments
+    # given to the package function and all optional ones stored in kw, minus
+    # the source, target and env ones.
+ loc = locals()
+ del loc['kw']
+ kw.update(loc)
+ del kw['source'], kw['target'], kw['env']
+
+ # strip the install builder from the source files
+ target, source = stripinstallbuilder(target, source, env)
+
+ # put the arguments into the env and call the specfile builder.
+ env['msi_spec'] = kw
+ specfile = apply( wxs_builder, [env, target, source], kw )
+
+ # now call the WiX Tool with the built specfile added as a source.
+ msifile = env.WiX(target, specfile)
+
+ # return the target and source tuple.
+ return (msifile, source+[specfile])
+
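A corresponding sketch for the msi packager, again with invented values: the first block of keywords mirrors the mandatory arguments of package() and build_wxsfile_header_section() above, while LICENSE and X_MSI_LICENSE_TEXT feed build_license_file(). The WiX toolchain must be available for the final env.WiX() step.

    # Illustrative SConstruct fragment for an MSI package.
    env.Package(NAME='hello',
                VERSION='1.0',
                PACKAGETYPE='msi',
                SUMMARY='hello world program',
                DESCRIPTION='a longer description of the hello world program',
                VENDOR='Example Corp',
                X_MSI_LANGUAGE='1033',
                LICENSE='mit',
                X_MSI_LICENSE_TEXT='Permission is hereby granted ...')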
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/rpm.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/rpm.py
new file mode 100644
index 000000000..984155989
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/rpm.py
@@ -0,0 +1,362 @@
+"""SCons.Tool.Packaging.rpm
+
+The rpm packager.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/rpm.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import string
+
+import SCons.Builder
+
+from SCons.Environment import OverrideEnvironment
+from SCons.Tool.packaging import stripinstallbuilder, src_targz
+from SCons.Errors import UserError
+
+def package(env, target, source, PACKAGEROOT, NAME, VERSION,
+ PACKAGEVERSION, DESCRIPTION, SUMMARY, X_RPM_GROUP, LICENSE,
+ **kw):
+ # initialize the rpm tool
+ SCons.Tool.Tool('rpm').generate(env)
+
+ bld = env['BUILDERS']['Rpm']
+
+ # Generate a UserError whenever the target name has been set explicitly,
+ # since rpm does not allow for controlling it. This is detected by
+ # checking if the target has been set to the default by the Package()
+ # Environment function.
+ if str(target[0])!="%s-%s"%(NAME, VERSION):
+ raise UserError( "Setting target is not supported for rpm." )
+ else:
+ # This should be overridable from the construction environment,
+ # which it is by using ARCHITECTURE=.
+ # Guessing based on what os.uname() returns at least allows it
+ # to work for both i386 and x86_64 Linux systems.
+ archmap = {
+ 'i686' : 'i386',
+ 'i586' : 'i386',
+ 'i486' : 'i386',
+ }
+
+ buildarchitecture = os.uname()[4]
+ buildarchitecture = archmap.get(buildarchitecture, buildarchitecture)
+
+ if kw.has_key('ARCHITECTURE'):
+ buildarchitecture = kw['ARCHITECTURE']
+
+ fmt = '%s-%s-%s.%s.rpm'
+ srcrpm = fmt % (NAME, VERSION, PACKAGEVERSION, 'src')
+ binrpm = fmt % (NAME, VERSION, PACKAGEVERSION, buildarchitecture)
+
+ target = [ srcrpm, binrpm ]
+
+ # get the correct arguments into the kw hash
+ loc=locals()
+ del loc['kw']
+ kw.update(loc)
+ del kw['source'], kw['target'], kw['env']
+
+ # if no "SOURCE_URL" tag is given add a default one.
+ if not kw.has_key('SOURCE_URL'):
+ #kw['SOURCE_URL']=(str(target[0])+".tar.gz").replace('.rpm', '')
+ kw['SOURCE_URL']=string.replace(str(target[0])+".tar.gz", '.rpm', '')
+
+ # mangle the source and target list for the rpmbuild
+ env = OverrideEnvironment(env, kw)
+ target, source = stripinstallbuilder(target, source, env)
+ target, source = addspecfile(target, source, env)
+ target, source = collectintargz(target, source, env)
+
+    # now call the rpm builder to actually build the package.
+ return apply(bld, [env, target, source], kw)
+
+def collectintargz(target, source, env):
+ """ Puts all source files into a tar.gz file. """
+    # the rpm tool depends on a source package; until this is changed,
+    # this hack, which tries to pack all sources in, needs to be here.
+ sources = env.FindSourceFiles()
+
+ # filter out the target we are building the source list for.
+ #sources = [s for s in sources if not (s in target)]
+ sources = filter(lambda s, t=target: not (s in t), sources)
+
+ # find the .spec file for rpm and add it since it is not necessarily found
+ # by the FindSourceFiles function.
+ #sources.extend( [s for s in source if str(s).rfind('.spec')!=-1] )
+ spec_file = lambda s: string.rfind(str(s), '.spec') != -1
+ sources.extend( filter(spec_file, source) )
+
+ # as the source contains the url of the source package this rpm package
+ # is built from, we extract the target name
+ #tarball = (str(target[0])+".tar.gz").replace('.rpm', '')
+ tarball = string.replace(str(target[0])+".tar.gz", '.rpm', '')
+ try:
+ #tarball = env['SOURCE_URL'].split('/')[-1]
+ tarball = string.split(env['SOURCE_URL'], '/')[-1]
+ except KeyError, e:
+ raise SCons.Errors.UserError( "Missing PackageTag '%s' for RPM packager" % e.args[0] )
+
+ tarball = src_targz.package(env, source=sources, target=tarball,
+ PACKAGEROOT=env['PACKAGEROOT'], )
+
+ return (target, tarball)
+
+def addspecfile(target, source, env):
+ specfile = "%s-%s" % (env['NAME'], env['VERSION'])
+
+ bld = SCons.Builder.Builder(action = build_specfile,
+ suffix = '.spec',
+ target_factory = SCons.Node.FS.File)
+
+ source.extend(bld(env, specfile, source))
+
+ return (target,source)
+
+def build_specfile(target, source, env):
+    """ Builds an RPM specfile from a dictionary with string metadata and
+ by analyzing a tree of nodes.
+ """
+ file = open(target[0].abspath, 'w')
+
+ try:
+ file.write( build_specfile_header(env) )
+ file.write( build_specfile_sections(env) )
+ file.write( build_specfile_filesection(env, source) )
+ file.close()
+
+ # call a user specified function
+ if env.has_key('CHANGE_SPECFILE'):
+ env['CHANGE_SPECFILE'](target, source)
+
+ except KeyError, e:
+ raise SCons.Errors.UserError( '"%s" package field for RPM is missing.' % e.args[0] )
+
+
+#
+# mandatory and optional package tag section
+#
+def build_specfile_sections(spec):
+    """ Builds the sections of an rpm specfile.
+ """
+ str = ""
+
+ mandatory_sections = {
+ 'DESCRIPTION' : '\n%%description\n%s\n\n', }
+
+ str = str + SimpleTagCompiler(mandatory_sections).compile( spec )
+
+ optional_sections = {
+ 'DESCRIPTION_' : '%%description -l %s\n%s\n\n',
+ 'CHANGELOG' : '%%changelog\n%s\n\n',
+ 'X_RPM_PREINSTALL' : '%%pre\n%s\n\n',
+ 'X_RPM_POSTINSTALL' : '%%post\n%s\n\n',
+ 'X_RPM_PREUNINSTALL' : '%%preun\n%s\n\n',
+ 'X_RPM_POSTUNINSTALL' : '%%postun\n%s\n\n',
+ 'X_RPM_VERIFY' : '%%verify\n%s\n\n',
+
+        # These are for internal use but could possibly be overridden
+ 'X_RPM_PREP' : '%%prep\n%s\n\n',
+ 'X_RPM_BUILD' : '%%build\n%s\n\n',
+ 'X_RPM_INSTALL' : '%%install\n%s\n\n',
+ 'X_RPM_CLEAN' : '%%clean\n%s\n\n',
+ }
+
+ # Default prep, build, install and clean rules
+ # TODO: optimize those build steps, to not compile the project a second time
+ if not spec.has_key('X_RPM_PREP'):
+ spec['X_RPM_PREP'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"' + '\n%setup -q'
+
+ if not spec.has_key('X_RPM_BUILD'):
+ spec['X_RPM_BUILD'] = 'mkdir "$RPM_BUILD_ROOT"'
+
+ if not spec.has_key('X_RPM_INSTALL'):
+ spec['X_RPM_INSTALL'] = 'scons --install-sandbox="$RPM_BUILD_ROOT" "$RPM_BUILD_ROOT"'
+
+ if not spec.has_key('X_RPM_CLEAN'):
+ spec['X_RPM_CLEAN'] = '[ -n "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != / ] && rm -rf "$RPM_BUILD_ROOT"'
+
+ str = str + SimpleTagCompiler(optional_sections, mandatory=0).compile( spec )
+
+ return str
+
+def build_specfile_header(spec):
+    """ Builds all sections but the %files section of an rpm specfile
+ """
+ str = ""
+
+ # first the mandatory sections
+ mandatory_header_fields = {
+ 'NAME' : '%%define name %s\nName: %%{name}\n',
+ 'VERSION' : '%%define version %s\nVersion: %%{version}\n',
+ 'PACKAGEVERSION' : '%%define release %s\nRelease: %%{release}\n',
+ 'X_RPM_GROUP' : 'Group: %s\n',
+ 'SUMMARY' : 'Summary: %s\n',
+ 'LICENSE' : 'License: %s\n', }
+
+ str = str + SimpleTagCompiler(mandatory_header_fields).compile( spec )
+
+ # now the optional tags
+ optional_header_fields = {
+ 'VENDOR' : 'Vendor: %s\n',
+ 'X_RPM_URL' : 'Url: %s\n',
+ 'SOURCE_URL' : 'Source: %s\n',
+ 'SUMMARY_' : 'Summary(%s): %s\n',
+ 'X_RPM_DISTRIBUTION' : 'Distribution: %s\n',
+ 'X_RPM_ICON' : 'Icon: %s\n',
+ 'X_RPM_PACKAGER' : 'Packager: %s\n',
+ 'X_RPM_GROUP_' : 'Group(%s): %s\n',
+
+ 'X_RPM_REQUIRES' : 'Requires: %s\n',
+ 'X_RPM_PROVIDES' : 'Provides: %s\n',
+ 'X_RPM_CONFLICTS' : 'Conflicts: %s\n',
+ 'X_RPM_BUILDREQUIRES' : 'BuildRequires: %s\n',
+
+ 'X_RPM_SERIAL' : 'Serial: %s\n',
+ 'X_RPM_EPOCH' : 'Epoch: %s\n',
+ 'X_RPM_AUTOREQPROV' : 'AutoReqProv: %s\n',
+ 'X_RPM_EXCLUDEARCH' : 'ExcludeArch: %s\n',
+ 'X_RPM_EXCLUSIVEARCH' : 'ExclusiveArch: %s\n',
+ 'X_RPM_PREFIX' : 'Prefix: %s\n',
+ 'X_RPM_CONFLICTS' : 'Conflicts: %s\n',
+
+ # internal use
+ 'X_RPM_BUILDROOT' : 'BuildRoot: %s\n', }
+
+ # fill in default values:
+    # Adding a BuildRequires renders the .rpm unbuildable on systems which
+    # are not managed by rpm, since the database needed to resolve this dependency
+    # is missing (take Gentoo as an example).
+# if not s.has_key('x_rpm_BuildRequires'):
+# s['x_rpm_BuildRequires'] = 'scons'
+
+ if not spec.has_key('X_RPM_BUILDROOT'):
+ spec['X_RPM_BUILDROOT'] = '%{_tmppath}/%{name}-%{version}-%{release}'
+
+ str = str + SimpleTagCompiler(optional_header_fields, mandatory=0).compile( spec )
+ return str
+
+#
+# mandatory and optional file tags
+#
+def build_specfile_filesection(spec, files):
+    """ builds the %files section of the specfile
+ """
+ str = '%files\n'
+
+ if not spec.has_key('X_RPM_DEFATTR'):
+ spec['X_RPM_DEFATTR'] = '(-,root,root)'
+
+ str = str + '%%defattr %s\n' % spec['X_RPM_DEFATTR']
+
+ supported_tags = {
+ 'PACKAGING_CONFIG' : '%%config %s',
+ 'PACKAGING_CONFIG_NOREPLACE' : '%%config(noreplace) %s',
+ 'PACKAGING_DOC' : '%%doc %s',
+ 'PACKAGING_UNIX_ATTR' : '%%attr %s',
+ 'PACKAGING_LANG_' : '%%lang(%s) %s',
+ 'PACKAGING_X_RPM_VERIFY' : '%%verify %s',
+ 'PACKAGING_X_RPM_DIR' : '%%dir %s',
+ 'PACKAGING_X_RPM_DOCDIR' : '%%docdir %s',
+ 'PACKAGING_X_RPM_GHOST' : '%%ghost %s', }
+
+ for file in files:
+ # build the tagset
+ tags = {}
+ for k in supported_tags.keys():
+ try:
+ tags[k]=getattr(file, k)
+ except AttributeError:
+ pass
+
+ # compile the tagset
+ str = str + SimpleTagCompiler(supported_tags, mandatory=0).compile( tags )
+
+ str = str + ' '
+ str = str + file.PACKAGING_INSTALL_LOCATION
+ str = str + '\n\n'
+
+ return str
+
+class SimpleTagCompiler:
+    """ This class is a simple string substitution utility:
+    the replacement specification is stored in the tagset dictionary, something
+ like:
+ { "abc" : "cdef %s ",
+ "abc_" : "cdef %s %s" }
+
+ the compile function gets a value dictionary, which may look like:
+ { "abc" : "ghij",
+ "abc_gh" : "ij" }
+
+ The resulting string will be:
+ "cdef ghij cdef gh ij"
+ """
+ def __init__(self, tagset, mandatory=1):
+ self.tagset = tagset
+ self.mandatory = mandatory
+
+ def compile(self, values):
+ """ compiles the tagset and returns a str containing the result
+ """
+ def is_international(tag):
+ #return tag.endswith('_')
+ return tag[-1:] == '_'
+
+ def get_country_code(tag):
+ return tag[-2:]
+
+ def strip_country_code(tag):
+ return tag[:-2]
+
+ replacements = self.tagset.items()
+
+ str = ""
+ #domestic = [ (k,v) for k,v in replacements if not is_international(k) ]
+ domestic = filter(lambda t, i=is_international: not i(t[0]), replacements)
+ for key, replacement in domestic:
+ try:
+ str = str + replacement % values[key]
+ except KeyError, e:
+ if self.mandatory:
+ raise e
+
+ #international = [ (k,v) for k,v in replacements if is_international(k) ]
+ international = filter(lambda t, i=is_international: i(t[0]), replacements)
+ for key, replacement in international:
+ try:
+ #int_values_for_key = [ (get_country_code(k),v) for k,v in values.items() if strip_country_code(k) == key ]
+ x = filter(lambda t,key=key,s=strip_country_code: s(t[0]) == key, values.items())
+ int_values_for_key = map(lambda t,g=get_country_code: (g(t[0]),t[1]), x)
+ for v in int_values_for_key:
+ str = str + replacement % v
+ except KeyError, e:
+ if self.mandatory:
+ raise e
+
+ return str
+
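A sketch for the rpm packager with invented values; NAME, VERSION, PACKAGEVERSION, DESCRIPTION, SUMMARY, X_RPM_GROUP and LICENSE are the mandatory tags from the package() signature above, and SOURCE_URL is derived from the target name when omitted:

    # Illustrative SConstruct fragment for an RPM package.
    env.Package(NAME='hello',
                VERSION='1.0',
                PACKAGEVERSION=1,
                PACKAGETYPE='rpm',
                SUMMARY='hello world program',
                DESCRIPTION='a longer description of the hello world program',
                LICENSE='gpl',
                X_RPM_GROUP='Applications/Productivity',
                SOURCE_URL='http://example.org/hello-1.0.tar.gz')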
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_tarbz2.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_tarbz2.py
new file mode 100644
index 000000000..7dafa31ab
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_tarbz2.py
@@ -0,0 +1,37 @@
+"""SCons.Tool.Packaging.tarbz2
+
+The tarbz2 SRC packager.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/src_tarbz2.py 3842 2008/12/20 22:59:52 scons"
+
+from SCons.Tool.packaging import putintopackageroot
+
+def package(env, target, source, PACKAGEROOT, **kw):
+ bld = env['BUILDERS']['Tar']
+ bld.set_suffix('.tar.bz2')
+ target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0)
+ return bld(env, target, source, TARFLAGS='-jc')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_targz.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_targz.py
new file mode 100644
index 000000000..b60ceee5d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_targz.py
@@ -0,0 +1,37 @@
+"""SCons.Tool.Packaging.targz
+
+The targz SRC packager.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/src_targz.py 3842 2008/12/20 22:59:52 scons"
+
+from SCons.Tool.packaging import putintopackageroot
+
+def package(env, target, source, PACKAGEROOT, **kw):
+ bld = env['BUILDERS']['Tar']
+ bld.set_suffix('.tar.gz')
+ target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0)
+ return bld(env, target, source, TARFLAGS='-zc')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_zip.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_zip.py
new file mode 100644
index 000000000..d76f24dfa
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/src_zip.py
@@ -0,0 +1,37 @@
+"""SCons.Tool.Packaging.zip
+
+The zip SRC packager.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/src_zip.py 3842 2008/12/20 22:59:52 scons"
+
+from SCons.Tool.packaging import putintopackageroot
+
+def package(env, target, source, PACKAGEROOT, **kw):
+ bld = env['BUILDERS']['Zip']
+ bld.set_suffix('.zip')
+ target, source = putintopackageroot(target, source, env, PACKAGEROOT, honor_install_location=0)
+ return bld(env, target, source)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/tarbz2.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/tarbz2.py
new file mode 100644
index 000000000..69e9737bf
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/tarbz2.py
@@ -0,0 +1,38 @@
+"""SCons.Tool.Packaging.tarbz2
+
+The tarbz2 packager.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/tarbz2.py 3842 2008/12/20 22:59:52 scons"
+
+from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot
+
+def package(env, target, source, PACKAGEROOT, **kw):
+ bld = env['BUILDERS']['Tar']
+    bld.set_suffix('.tar.bz2')
+ target, source = putintopackageroot(target, source, env, PACKAGEROOT)
+ target, source = stripinstallbuilder(target, source, env)
+ return bld(env, target, source, TARFLAGS='-jc')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/targz.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/targz.py
new file mode 100644
index 000000000..37a8bfebd
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/targz.py
@@ -0,0 +1,38 @@
+"""SCons.Tool.Packaging.targz
+
+The targz packager.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/targz.py 3842 2008/12/20 22:59:52 scons"
+
+from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot
+
+def package(env, target, source, PACKAGEROOT, **kw):
+ bld = env['BUILDERS']['Tar']
+ bld.set_suffix('.tar.gz')
+ target, source = stripinstallbuilder(target, source, env)
+ target, source = putintopackageroot(target, source, env, PACKAGEROOT)
+ return bld(env, target, source, TARFLAGS='-zc')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/zip.py b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/zip.py
new file mode 100644
index 000000000..3cd4dd829
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/packaging/zip.py
@@ -0,0 +1,38 @@
+"""SCons.Tool.Packaging.zip
+
+The zip packager.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/packaging/zip.py 3842 2008/12/20 22:59:52 scons"
+
+from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot
+
+def package(env, target, source, PACKAGEROOT, **kw):
+ bld = env['BUILDERS']['Zip']
+ bld.set_suffix('.zip')
+ target, source = stripinstallbuilder(target, source, env)
+ target, source = putintopackageroot(target, source, env, PACKAGEROOT)
+ return bld(env, target, source)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/pdf.py b/tools/scons/scons-local-1.2.0/SCons/Tool/pdf.py
new file mode 100644
index 000000000..bf6a83eec
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/pdf.py
@@ -0,0 +1,72 @@
+"""SCons.Tool.pdf
+
+Common PDF Builder definition for various other Tool modules that use it.
+Add an explicit action to run epstopdf to convert .eps files to .pdf
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/pdf.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Builder
+import SCons.Tool
+import SCons.Util
+
+PDFBuilder = None
+
+EpsPdfAction = SCons.Action.Action('$EPSTOPDFCOM', '$EPSTOPDFCOMSTR')
+
+def generate(env):
+ try:
+ env['BUILDERS']['PDF']
+ except KeyError:
+ global PDFBuilder
+ if PDFBuilder is None:
+ PDFBuilder = SCons.Builder.Builder(action = {},
+ source_scanner = SCons.Tool.PDFLaTeXScanner,
+ prefix = '$PDFPREFIX',
+ suffix = '$PDFSUFFIX',
+ emitter = {},
+ source_ext_match = None,
+ single_source=True)
+ env['BUILDERS']['PDF'] = PDFBuilder
+
+ env['PDFPREFIX'] = ''
+ env['PDFSUFFIX'] = '.pdf'
+
+# put the epstopdf builder in this routine so we can add it after
+# the pdftex builder so that one is the default for no source suffix
+def generate2(env):
+ bld = env['BUILDERS']['PDF']
+ #bld.add_action('.ps', EpsPdfAction) # this is covered by direct Ghostcript action in gs.py
+ bld.add_action('.eps', EpsPdfAction)
+
+ env['EPSTOPDF'] = 'epstopdf'
+ env['EPSTOPDFFLAGS'] = SCons.Util.CLVar('')
+ env['EPSTOPDFCOM'] = '$EPSTOPDF $EPSTOPDFFLAGS ${SOURCE} -o ${TARGET}'
+
+def exists(env):
+ # This only puts a skeleton Builder in place, so if someone
+ # references this Tool directly, it's always "available."
+ return 1
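
pdf.py only installs a skeleton PDF builder; generate2() later attaches the epstopdf action for .eps sources, while the TeX tools add the .tex/.ltx/.latex actions. A rough usage sketch, assuming epstopdf is on the PATH and the pdftex tool (which calls generate2) has been loaded:

    # SConstruct (sketch) -- convert an EPS figure with the shared PDF builder
    env = Environment(tools=['default', 'pdftex'])
    env.PDF('figure.pdf', 'figure.eps')   # runs $EPSTOPDFCOM (epstopdf ... -o ...)
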
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/pdflatex.py b/tools/scons/scons-local-1.2.0/SCons/Tool/pdflatex.py
new file mode 100644
index 000000000..5ffb9cb47
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/pdflatex.py
@@ -0,0 +1,75 @@
+"""SCons.Tool.pdflatex
+
+Tool-specific initialization for pdflatex.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/pdflatex.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Util
+import SCons.Tool.pdf
+import SCons.Tool.tex
+
+PDFLaTeXAction = None
+
+def PDFLaTeXAuxFunction(target = None, source= None, env=None):
+ result = SCons.Tool.tex.InternalLaTeXAuxAction( PDFLaTeXAction, target, source, env )
+ return result
+
+PDFLaTeXAuxAction = None
+
+def generate(env):
+ """Add Builders and construction variables for pdflatex to an Environment."""
+ global PDFLaTeXAction
+ if PDFLaTeXAction is None:
+ PDFLaTeXAction = SCons.Action.Action('$PDFLATEXCOM', '$PDFLATEXCOMSTR')
+
+ global PDFLaTeXAuxAction
+ if PDFLaTeXAuxAction is None:
+ PDFLaTeXAuxAction = SCons.Action.Action(PDFLaTeXAuxFunction,
+ strfunction=SCons.Tool.tex.TeXLaTeXStrFunction)
+
+ import pdf
+ pdf.generate(env)
+
+ bld = env['BUILDERS']['PDF']
+ bld.add_action('.ltx', PDFLaTeXAuxAction)
+ bld.add_action('.latex', PDFLaTeXAuxAction)
+ bld.add_emitter('.ltx', SCons.Tool.tex.tex_pdf_emitter)
+ bld.add_emitter('.latex', SCons.Tool.tex.tex_pdf_emitter)
+
+ env['PDFLATEX'] = 'pdflatex'
+ env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode')
+ env['PDFLATEXCOM'] = 'cd ${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}'
+ env['LATEXRETRIES'] = 3
+
+def exists(env):
+ return env.Detect('pdflatex')
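
The tool above attaches PDFLaTeXAuxAction to .ltx and .latex sources, so producing a PDF is a single builder call; InternalLaTeXAuxAction reruns pdflatex up to $LATEXRETRIES times to settle references. A minimal sketch, assuming pdflatex is installed:

    # SConstruct (sketch) -- report.pdf from a LaTeX source
    env = Environment(tools=['default', 'pdflatex'])
    env.PDF(target='report.pdf', source='report.ltx')
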
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/pdftex.py b/tools/scons/scons-local-1.2.0/SCons/Tool/pdftex.py
new file mode 100644
index 000000000..2a5bfccd4
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/pdftex.py
@@ -0,0 +1,99 @@
+"""SCons.Tool.pdftex
+
+Tool-specific initialization for pdftex.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/pdftex.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Util
+import SCons.Tool.tex
+
+PDFTeXAction = None
+
+# This action might be needed more than once if we are dealing with
+# labels and bibtex.
+PDFLaTeXAction = None
+
+def PDFLaTeXAuxAction(target = None, source= None, env=None):
+ result = SCons.Tool.tex.InternalLaTeXAuxAction( PDFLaTeXAction, target, source, env )
+ return result
+
+def PDFTeXLaTeXFunction(target = None, source= None, env=None):
+ """A builder for TeX and LaTeX that scans the source file to
+ decide the "flavor" of the source and then executes the appropriate
+ program."""
+ if SCons.Tool.tex.is_LaTeX(source):
+ result = PDFLaTeXAuxAction(target,source,env)
+ else:
+ result = PDFTeXAction(target,source,env)
+ return result
+
+PDFTeXLaTeXAction = None
+
+def generate(env):
+ """Add Builders and construction variables for pdftex to an Environment."""
+ global PDFTeXAction
+ if PDFTeXAction is None:
+ PDFTeXAction = SCons.Action.Action('$PDFTEXCOM', '$PDFTEXCOMSTR')
+
+ global PDFLaTeXAction
+ if PDFLaTeXAction is None:
+ PDFLaTeXAction = SCons.Action.Action("$PDFLATEXCOM", "$PDFLATEXCOMSTR")
+
+ global PDFTeXLaTeXAction
+ if PDFTeXLaTeXAction is None:
+ PDFTeXLaTeXAction = SCons.Action.Action(PDFTeXLaTeXFunction,
+ strfunction=SCons.Tool.tex.TeXLaTeXStrFunction)
+
+ import pdf
+ pdf.generate(env)
+
+ bld = env['BUILDERS']['PDF']
+ bld.add_action('.tex', PDFTeXLaTeXAction)
+ bld.add_emitter('.tex', SCons.Tool.tex.tex_pdf_emitter)
+
+ # Add the epstopdf builder after the pdftex builder
+ # so pdftex is the default for no source suffix
+ pdf.generate2(env)
+
+ env['PDFTEX'] = 'pdftex'
+ env['PDFTEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode')
+ env['PDFTEXCOM'] = 'cd ${TARGET.dir} && $PDFTEX $PDFTEXFLAGS ${SOURCE.file}'
+
+ # Duplicate from latex.py. If latex.py goes away, then this is still OK.
+ env['PDFLATEX'] = 'pdflatex'
+ env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode')
+ env['PDFLATEXCOM'] = 'cd ${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}'
+ env['LATEXRETRIES'] = 3
+
+def exists(env):
+ return env.Detect('pdftex')
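
For plain .tex sources, PDFTeXLaTeXFunction inspects the file via SCons.Tool.tex.is_LaTeX() and dispatches to either pdflatex or pdftex, so the same builder call covers both flavors:

    # SConstruct (sketch) -- flavor is detected from the .tex contents
    env = Environment(tools=['default', 'pdftex'])
    env.PDF('paper.tex')   # pdflatex for LaTeX documents, plain pdftex otherwise
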
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/qt.py b/tools/scons/scons-local-1.2.0/SCons/Tool/qt.py
new file mode 100644
index 000000000..c4e5ca74f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/qt.py
@@ -0,0 +1,330 @@
+
+"""SCons.Tool.qt
+
+Tool-specific initialization for Qt.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/qt.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import re
+
+import SCons.Action
+import SCons.Builder
+import SCons.Defaults
+import SCons.Scanner
+import SCons.Tool
+import SCons.Util
+import SCons.Warnings
+
+class ToolQtWarning(SCons.Warnings.Warning):
+ pass
+
+class GeneratedMocFileNotIncluded(ToolQtWarning):
+ pass
+
+class QtdirNotFound(ToolQtWarning):
+ pass
+
+SCons.Warnings.enableWarningClass(ToolQtWarning)
+
+header_extensions = [".h", ".hxx", ".hpp", ".hh"]
+if SCons.Util.case_sensitive_suffixes('.h', '.H'):
+ header_extensions.append('.H')
+cplusplus = __import__('c++', globals(), locals(), [])
+cxx_suffixes = cplusplus.CXXSuffixes
+
+def checkMocIncluded(target, source, env):
+ moc = target[0]
+ cpp = source[0]
+ # looks like cpp.includes is cleared before the build stage :-(
+ # not really sure about the path transformations (moc.cwd? cpp.cwd?) :-/
+ path = SCons.Defaults.CScan.path(env, moc.cwd)
+ includes = SCons.Defaults.CScan(cpp, env, path)
+ if not moc in includes:
+ SCons.Warnings.warn(
+ GeneratedMocFileNotIncluded,
+ "Generated moc file '%s' is not included by '%s'" %
+ (str(moc), str(cpp)))
+
+def find_file(filename, paths, node_factory):
+ for dir in paths:
+ node = node_factory(filename, dir)
+ if node.rexists():
+ return node
+ return None
+
+class _Automoc:
+ """
+ Callable class, which works as an emitter for Programs, SharedLibraries and
+ StaticLibraries.
+ """
+
+ def __init__(self, objBuilderName):
+ self.objBuilderName = objBuilderName
+
+ def __call__(self, target, source, env):
+ """
+ Smart autoscan function. Gets the list of objects for the Program
+ or Lib. Adds objects and builders for the special qt files.
+ """
+ try:
+ if int(env.subst('$QT_AUTOSCAN')) == 0:
+ return target, source
+ except ValueError:
+ pass
+ try:
+ debug = int(env.subst('$QT_DEBUG'))
+ except ValueError:
+ debug = 0
+
+ # some shortcuts used in the scanner
+ splitext = SCons.Util.splitext
+ objBuilder = getattr(env, self.objBuilderName)
+
+ # some regular expressions:
+ # Q_OBJECT detection
+ q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]')
+ # cxx and c comment 'eater'
+ #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)')
+ # CW: something must be wrong with the regexp. See also bug #998222
+ # CURRENTLY THERE IS NO TEST CASE FOR THAT
+
+ # The following is kind of hacky to get builders working properly (FIXME)
+ objBuilderEnv = objBuilder.env
+ objBuilder.env = env
+ mocBuilderEnv = env.Moc.env
+ env.Moc.env = env
+
+ # make a deep copy for the result; MocH objects will be appended
+ out_sources = source[:]
+
+ for obj in source:
+ if not obj.has_builder():
+ # binary obj file provided
+ if debug:
+ print "scons: qt: '%s' seems to be a binary. Discarded." % str(obj)
+ continue
+ cpp = obj.sources[0]
+ if not splitext(str(cpp))[1] in cxx_suffixes:
+ if debug:
+ print "scons: qt: '%s' is no cxx file. Discarded." % str(cpp)
+ # c or fortran source
+ continue
+ #cpp_contents = comment.sub('', cpp.get_contents())
+ cpp_contents = cpp.get_contents()
+ h=None
+ for h_ext in header_extensions:
+ # try to find the header file in the corresponding source
+ # directory
+ hname = splitext(cpp.name)[0] + h_ext
+ h = find_file(hname, (cpp.get_dir(),), env.File)
+ if h:
+ if debug:
+ print "scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))
+ #h_contents = comment.sub('', h.get_contents())
+ h_contents = h.get_contents()
+ break
+ if not h and debug:
+ print "scons: qt: no header for '%s'." % (str(cpp))
+ if h and q_object_search.search(h_contents):
+ # h file with the Q_OBJECT macro found -> add moc_cpp
+ moc_cpp = env.Moc(h)
+ moc_o = objBuilder(moc_cpp)
+ out_sources.append(moc_o)
+ #moc_cpp.target_scanner = SCons.Defaults.CScan
+ if debug:
+ print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))
+ if cpp and q_object_search.search(cpp_contents):
+ # cpp file with Q_OBJECT macro found -> add moc
+ # (to be included in cpp)
+ moc = env.Moc(cpp)
+ env.Ignore(moc, moc)
+ if debug:
+ print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))
+ #moc.source_scanner = SCons.Defaults.CScan
+ # restore the original env attributes (FIXME)
+ objBuilder.env = objBuilderEnv
+ env.Moc.env = mocBuilderEnv
+
+ return (target, out_sources)
+
+AutomocShared = _Automoc('SharedObject')
+AutomocStatic = _Automoc('StaticObject')
+
+def _detect(env):
+ """Not really safe, but fast method to detect the QT library"""
+ QTDIR = None
+ if not QTDIR:
+ QTDIR = env.get('QTDIR',None)
+ if not QTDIR:
+ QTDIR = os.environ.get('QTDIR',None)
+ if not QTDIR:
+ moc = env.WhereIs('moc')
+ if moc:
+ QTDIR = os.path.dirname(os.path.dirname(moc))
+ SCons.Warnings.warn(
+ QtdirNotFound,
+ "Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR)
+ else:
+ QTDIR = None
+ SCons.Warnings.warn(
+ QtdirNotFound,
+ "Could not detect qt, using empty QTDIR")
+ return QTDIR
+
+def uicEmitter(target, source, env):
+ adjustixes = SCons.Util.adjustixes
+ bs = SCons.Util.splitext(str(source[0].name))[0]
+ bs = os.path.join(str(target[0].get_dir()),bs)
+ # first target (header) is automatically added by builder
+ if len(target) < 2:
+ # second target is implementation
+ target.append(adjustixes(bs,
+ env.subst('$QT_UICIMPLPREFIX'),
+ env.subst('$QT_UICIMPLSUFFIX')))
+ if len(target) < 3:
+ # third target is moc file
+ target.append(adjustixes(bs,
+ env.subst('$QT_MOCHPREFIX'),
+ env.subst('$QT_MOCHSUFFIX')))
+ return target, source
+
+def uicScannerFunc(node, env, path):
+ lookout = []
+ lookout.extend(env['CPPPATH'])
+ lookout.append(str(node.rfile().dir))
+ includes = re.findall("<include.*?>(.*?)</include>", node.get_contents())
+ result = []
+ for incFile in includes:
+ dep = env.FindFile(incFile,lookout)
+ if dep:
+ result.append(dep)
+ return result
+
+uicScanner = SCons.Scanner.Base(uicScannerFunc,
+ name = "UicScanner",
+ node_class = SCons.Node.FS.File,
+ node_factory = SCons.Node.FS.File,
+ recursive = 0)
+
+def generate(env):
+ """Add Builders and construction variables for qt to an Environment."""
+ CLVar = SCons.Util.CLVar
+ Action = SCons.Action.Action
+ Builder = SCons.Builder.Builder
+
+ env.SetDefault(QTDIR = _detect(env),
+ QT_BINPATH = os.path.join('$QTDIR', 'bin'),
+ QT_CPPPATH = os.path.join('$QTDIR', 'include'),
+ QT_LIBPATH = os.path.join('$QTDIR', 'lib'),
+ QT_MOC = os.path.join('$QT_BINPATH','moc'),
+ QT_UIC = os.path.join('$QT_BINPATH','uic'),
+ QT_LIB = 'qt', # may be set to qt-mt
+
+ QT_AUTOSCAN = 1, # scan for moc'able sources
+
+ # Some QT specific flags. I don't expect someone wants to
+ # manipulate those ...
+ QT_UICIMPLFLAGS = CLVar(''),
+ QT_UICDECLFLAGS = CLVar(''),
+ QT_MOCFROMHFLAGS = CLVar(''),
+ QT_MOCFROMCXXFLAGS = CLVar('-i'),
+
+ # suffixes/prefixes for the headers / sources to generate
+ QT_UICDECLPREFIX = '',
+ QT_UICDECLSUFFIX = '.h',
+ QT_UICIMPLPREFIX = 'uic_',
+ QT_UICIMPLSUFFIX = '$CXXFILESUFFIX',
+ QT_MOCHPREFIX = 'moc_',
+ QT_MOCHSUFFIX = '$CXXFILESUFFIX',
+ QT_MOCCXXPREFIX = '',
+ QT_MOCCXXSUFFIX = '.moc',
+ QT_UISUFFIX = '.ui',
+
+ # Commands for the qt support ...
+ # command to generate header, implementation and moc-file
+ # from a .ui file
+ QT_UICCOM = [
+ CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'),
+ CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} '
+ '-o ${TARGETS[1]} $SOURCE'),
+ CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')],
+ # command to generate meta object information for a class
+ # declared in a header
+ QT_MOCFROMHCOM = (
+ '$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'),
+ # command to generate meta object information for a class
+ # declared in a cpp file
+ QT_MOCFROMCXXCOM = [
+ CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'),
+ Action(checkMocIncluded,None)])
+
+ # ... and the corresponding builders
+ uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'),
+ emitter=uicEmitter,
+ src_suffix='$QT_UISUFFIX',
+ suffix='$QT_UICDECLSUFFIX',
+ prefix='$QT_UICDECLPREFIX',
+ source_scanner=uicScanner)
+ mocBld = Builder(action={}, prefix={}, suffix={})
+ for h in header_extensions:
+ act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR')
+ mocBld.add_action(h, act)
+ mocBld.prefix[h] = '$QT_MOCHPREFIX'
+ mocBld.suffix[h] = '$QT_MOCHSUFFIX'
+ for cxx in cxx_suffixes:
+ act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR')
+ mocBld.add_action(cxx, act)
+ mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX'
+ mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX'
+
+ # register the builders
+ env['BUILDERS']['Uic'] = uicBld
+ env['BUILDERS']['Moc'] = mocBld
+ static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
+ static_obj.add_src_builder('Uic')
+ shared_obj.add_src_builder('Uic')
+
+ # We use the emitters of Program / StaticLibrary / SharedLibrary
+ # to scan for moc'able files
+ # We can't refer to the builders directly, we have to fetch them
+ # as Environment attributes because that sets them up to be called
+ # correctly later by our emitter.
+ env.AppendUnique(PROGEMITTER =[AutomocStatic],
+ SHLIBEMITTER=[AutomocShared],
+ LIBEMITTER =[AutomocStatic],
+ # Of course, we need to link against the qt libraries
+ CPPPATH=["$QT_CPPPATH"],
+ LIBPATH=["$QT_LIBPATH"],
+ LIBS=['$QT_LIB'])
+
+def exists(env):
+ return _detect(env)
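
With QT_AUTOSCAN left at its default of 1, the _Automoc emitter scans every C++ source (and its sibling header) for Q_OBJECT and schedules the necessary moc runs itself, so a project only has to enable the tool and point QTDIR at a Qt 3-style installation. A sketch with a made-up QTDIR:

    # SConstruct (sketch) -- Qt project with automatic moc handling
    env = Environment(tools=['default', 'qt'], QTDIR='/usr/lib/qt3')
    env.Program('hello', ['main.cpp', 'window.cpp'])  # moc_* objects appended by the emitter
    env.Uic('dialog.ui')  # emits the header, the uic_* implementation and its moc file
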
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/rmic.py b/tools/scons/scons-local-1.2.0/SCons/Tool/rmic.py
new file mode 100644
index 000000000..03a228c98
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/rmic.py
@@ -0,0 +1,115 @@
+"""SCons.Tool.rmic
+
+Tool-specific initialization for rmic.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/rmic.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import string
+
+import SCons.Action
+import SCons.Builder
+import SCons.Node.FS
+import SCons.Util
+
+def emit_rmic_classes(target, source, env):
+ """Create and return lists of Java RMI stub and skeleton
+ class files to be created from a set of class files.
+ """
+ class_suffix = env.get('JAVACLASSSUFFIX', '.class')
+ classdir = env.get('JAVACLASSDIR')
+
+ if not classdir:
+ try:
+ s = source[0]
+ except IndexError:
+ classdir = '.'
+ else:
+ try:
+ classdir = s.attributes.java_classdir
+ except AttributeError:
+ classdir = '.'
+ classdir = env.Dir(classdir).rdir()
+ if str(classdir) == '.':
+ c_ = None
+ else:
+ c_ = str(classdir) + os.sep
+
+ slist = []
+ for src in source:
+ try:
+ classname = src.attributes.java_classname
+ except AttributeError:
+ classname = str(src)
+ if c_ and classname[:len(c_)] == c_:
+ classname = classname[len(c_):]
+ if class_suffix and classname[-len(class_suffix):] == class_suffix:
+ classname = classname[:-len(class_suffix)]
+ s = src.rfile()
+ s.attributes.java_classdir = classdir
+ s.attributes.java_classname = classname
+ slist.append(s)
+
+ stub_suffixes = ['_Stub']
+ if env.get('JAVAVERSION') == '1.4':
+ stub_suffixes.append('_Skel')
+
+ tlist = []
+ for s in source:
+ for suff in stub_suffixes:
+ fname = string.replace(s.attributes.java_classname, '.', os.sep) + \
+ suff + class_suffix
+ t = target[0].File(fname)
+ t.attributes.java_lookupdir = target[0]
+ tlist.append(t)
+
+ return tlist, source
+
+RMICAction = SCons.Action.Action('$RMICCOM', '$RMICCOMSTR')
+
+RMICBuilder = SCons.Builder.Builder(action = RMICAction,
+ emitter = emit_rmic_classes,
+ src_suffix = '$JAVACLASSSUFFIX',
+ target_factory = SCons.Node.FS.Dir,
+ source_factory = SCons.Node.FS.File)
+
+def generate(env):
+ """Add Builders and construction variables for rmic to an Environment."""
+ env['BUILDERS']['RMIC'] = RMICBuilder
+
+ env['RMIC'] = 'rmic'
+ env['RMICFLAGS'] = SCons.Util.CLVar('')
+ env['RMICCOM'] = '$RMIC $RMICFLAGS -d ${TARGET.attributes.java_lookupdir} -classpath ${SOURCE.attributes.java_classdir} ${SOURCES.attributes.java_classname}'
+ env['JAVACLASSSUFFIX'] = '.class'
+
+def exists(env):
+ return env.Detect('rmic')
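
The RMIC builder consumes the .class nodes produced by the Java builder; their java_classdir/java_classname attributes are what emit_rmic_classes uses to name the _Stub (and, for JDK 1.4, _Skel) outputs. A minimal sketch:

    # SConstruct (sketch) -- RMI stubs for compiled Java classes
    env = Environment(tools=['default', 'javac', 'rmic'])
    classes = env.Java(target='classes', source='src')
    env.RMIC(target='stubs', source=classes)
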
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/rpcgen.py b/tools/scons/scons-local-1.2.0/SCons/Tool/rpcgen.py
new file mode 100644
index 000000000..a70a6d16a
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/rpcgen.py
@@ -0,0 +1,64 @@
+"""SCons.Tool.rpcgen
+
+Tool-specific initialization for RPCGEN tools.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/rpcgen.py 3842 2008/12/20 22:59:52 scons"
+
+from SCons.Builder import Builder
+import SCons.Util
+
+cmd = "cd ${SOURCE.dir} && $RPCGEN -%s $RPCGENFLAGS %s -o ${TARGET.abspath} ${SOURCE.file}"
+
+rpcgen_client = cmd % ('l', '$RPCGENCLIENTFLAGS')
+rpcgen_header = cmd % ('h', '$RPCGENHEADERFLAGS')
+rpcgen_service = cmd % ('m', '$RPCGENSERVICEFLAGS')
+rpcgen_xdr = cmd % ('c', '$RPCGENXDRFLAGS')
+
+def generate(env):
+ "Add RPCGEN Builders and construction variables for an Environment."
+
+ client = Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x')
+ header = Builder(action=rpcgen_header, suffix='.h', src_suffix='.x')
+ service = Builder(action=rpcgen_service, suffix='_svc.c', src_suffix='.x')
+ xdr = Builder(action=rpcgen_xdr, suffix='_xdr.c', src_suffix='.x')
+ env.Append(BUILDERS={'RPCGenClient' : client,
+ 'RPCGenHeader' : header,
+ 'RPCGenService' : service,
+ 'RPCGenXDR' : xdr})
+ env['RPCGEN'] = 'rpcgen'
+ env['RPCGENFLAGS'] = SCons.Util.CLVar('')
+ env['RPCGENCLIENTFLAGS'] = SCons.Util.CLVar('')
+ env['RPCGENHEADERFLAGS'] = SCons.Util.CLVar('')
+ env['RPCGENSERVICEFLAGS'] = SCons.Util.CLVar('')
+ env['RPCGENXDRFLAGS'] = SCons.Util.CLVar('')
+
+def exists(env):
+ return env.Detect('rpcgen')
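
Each builder runs rpcgen on the same .x interface with a different mode flag, producing the conventionally suffixed output:

    # SConstruct (sketch) -- generate header, client, service and XDR code
    env = Environment(tools=['default', 'rpcgen'])
    env.RPCGenHeader('proto.x')   # proto.h      (rpcgen -h)
    env.RPCGenClient('proto.x')   # proto_clnt.c (rpcgen -l)
    env.RPCGenService('proto.x')  # proto_svc.c  (rpcgen -m)
    env.RPCGenXDR('proto.x')      # proto_xdr.c  (rpcgen -c)
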
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/rpm.py b/tools/scons/scons-local-1.2.0/SCons/Tool/rpm.py
new file mode 100644
index 000000000..6aadc948a
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/rpm.py
@@ -0,0 +1,126 @@
+"""SCons.Tool.rpm
+
+Tool-specific initialization for rpm.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+The rpm tool calls the rpmbuild command. The first and only argument should be
+a tar.gz consisting of the source files and a specfile.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/rpm.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import re
+import shutil
+import subprocess
+
+import SCons.Builder
+import SCons.Errors
+import SCons.Node.FS
+import SCons.Util
+import SCons.Action
+import SCons.Defaults
+
+def get_cmd(source, env):
+ tar_file_with_included_specfile = source
+ if SCons.Util.is_List(source):
+ tar_file_with_included_specfile = source[0]
+ return "%s %s %s"%(env['RPM'], env['RPMFLAGS'],
+ tar_file_with_included_specfile.abspath )
+
+def build_rpm(target, source, env):
+ # create a temporary rpm build root.
+ tmpdir = os.path.join( os.path.dirname( target[0].abspath ), 'rpmtemp' )
+ if os.path.exists(tmpdir):
+ shutil.rmtree(tmpdir)
+
+ # now create the mandatory rpm directory structure.
+ for d in ['RPMS', 'SRPMS', 'SPECS', 'BUILD']:
+ os.makedirs( os.path.join( tmpdir, d ) )
+
+ # set the topdir as an rpmflag.
+ env.Prepend( RPMFLAGS = '--define \'_topdir %s\'' % tmpdir )
+
+ # now call rpmbuild to create the rpm package.
+ handle = subprocess.Popen(get_cmd(source, env),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=True)
+ output = handle.stdout.read()
+ status = handle.wait()
+
+ if status:
+ raise SCons.Errors.BuildError( node=target[0],
+ errstr=output,
+ filename=str(target[0]) )
+ else:
+ # XXX: assume that LC_ALL=c is set while running rpmbuild
+ output_files = re.compile( 'Wrote: (.*)' ).findall( output )
+
+ for output, input in zip( output_files, target ):
+ rpm_output = os.path.basename(output)
+ expected = os.path.basename(input.get_path())
+
+ assert expected == rpm_output, "got %s but expected %s" % (rpm_output, expected)
+ shutil.copy( output, input.abspath )
+
+
+ # cleanup before leaving.
+ shutil.rmtree(tmpdir)
+
+ return status
+
+def string_rpm(target, source, env):
+ try:
+ return env['RPMCOMSTR']
+ except KeyError:
+ return get_cmd(source, env)
+
+rpmAction = SCons.Action.Action(build_rpm, string_rpm)
+
+RpmBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$RPMCOM', '$RPMCOMSTR'),
+ source_scanner = SCons.Defaults.DirScanner,
+ suffix = '$RPMSUFFIX')
+
+
+
+def generate(env):
+ """Add Builders and construction variables for rpm to an Environment."""
+ try:
+ bld = env['BUILDERS']['Rpm']
+ except KeyError:
+ bld = RpmBuilder
+ env['BUILDERS']['Rpm'] = bld
+
+ env.SetDefault(RPM = 'LC_ALL=c rpmbuild')
+ env.SetDefault(RPMFLAGS = SCons.Util.CLVar('-ta'))
+ env.SetDefault(RPMCOM = rpmAction)
+ env.SetDefault(RPMSUFFIX = '.rpm')
+
+def exists(env):
+ return env.Detect('rpmbuild')
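
As the docstring notes, the Rpm builder wants a single tar.gz that already bundles the sources and a specfile, which is why it is usually driven through the packaging tool rather than invoked directly. A hedged sketch (the metadata fields and values below are illustrative, not a complete spec):

    # SConstruct (sketch) -- binary RPM via the packaging tool
    env = Environment(tools=['default', 'packaging'])
    env.Package(NAME='foo', VERSION='1.0', PACKAGEVERSION='1',
                PACKAGETYPE='rpm', LICENSE='gpl',
                SUMMARY='example package', DESCRIPTION='example package',
                X_RPM_GROUP='Application/Misc')
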
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sgiar.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sgiar.py
new file mode 100644
index 000000000..0be6780cf
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sgiar.py
@@ -0,0 +1,62 @@
+"""SCons.Tool.sgiar
+
+Tool-specific initialization for SGI ar (library archive). If CC
+exists, static libraries should be built with it, so the prelinker has
+a chance to resolve C++ template instantiations.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sgiar.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+def generate(env):
+ """Add Builders and construction variables for ar to an Environment."""
+ SCons.Tool.createStaticLibBuilder(env)
+
+ if env.Detect('CC'):
+ env['AR'] = 'CC'
+ env['ARFLAGS'] = SCons.Util.CLVar('-ar')
+ env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES'
+ else:
+ env['AR'] = 'ar'
+ env['ARFLAGS'] = SCons.Util.CLVar('r')
+ env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES'
+
+ env['SHLINK'] = '$LINK'
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
+ env['SHLINKCOM'] = '$SHLINK $SHLINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
+ env['LIBPREFIX'] = 'lib'
+ env['LIBSUFFIX'] = '.a'
+
+def exists(env):
+ return env.Detect('CC') or env.Detect('ar')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sgic++.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sgic++.py
new file mode 100644
index 000000000..da0efb65d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sgic++.py
@@ -0,0 +1,52 @@
+"""SCons.Tool.sgic++
+
+Tool-specific initialization for MIPSpro C++ on SGI.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sgic++.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+cplusplus = __import__('c++', globals(), locals(), [])
+
+def generate(env):
+ """Add Builders and construction variables for SGI MIPS C++ to an Environment."""
+
+ cplusplus.generate(env)
+
+ env['CXX'] = 'CC'
+ env['CXXFLAGS'] = SCons.Util.CLVar('$CCFLAGS -LANG:std')
+ env['SHCXX'] = '$CXX'
+ env['SHOBJSUFFIX'] = '.o'
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
+
+def exists(env):
+ return env.Detect('CC')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sgicc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sgicc.py
new file mode 100644
index 000000000..57115695f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sgicc.py
@@ -0,0 +1,47 @@
+"""SCons.Tool.sgicc
+
+Tool-specific initialization for MIPSPro cc on SGI.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sgicc.py 3842 2008/12/20 22:59:52 scons"
+
+import cc
+
+def generate(env):
+ """Add Builders and construction variables for gcc to an Environment."""
+ cc.generate(env)
+
+ env['CXX'] = 'CC'
+ env['SHOBJSUFFIX'] = '.o'
+ env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
+
+def exists(env):
+ return env.Detect('cc')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sgilink.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sgilink.py
new file mode 100644
index 000000000..0036e26be
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sgilink.py
@@ -0,0 +1,57 @@
+"""SCons.Tool.sgilink
+
+Tool-specific initialization for the SGI MIPSPro linker on SGI.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sgilink.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+import link
+
+linkers = ['CC', 'cc']
+
+def generate(env):
+ """Add Builders and construction variables for MIPSPro to an Environment."""
+ link.generate(env)
+
+ env['LINK'] = env.Detect(linkers) or 'cc'
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
+
+ # __RPATH is set to $_RPATH in the platform specification if that
+ # platform supports it.
+ env.Append(LINKFLAGS=['$__RPATH'])
+ env['RPATHPREFIX'] = '-rpath '
+ env['RPATHSUFFIX'] = ''
+ env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
+
+def exists(env):
+ return env.Detect(linkers)
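
The SGI modules (sgicc, sgic++, sgiar, sgilink) are picked up automatically on IRIX, but the MIPSpro toolchain can also be requested explicitly; a sketch:

    # SConstruct (sketch) -- force the MIPSpro toolchain
    env = Environment(tools=['sgicc', 'sgic++', 'sgiar', 'sgilink'])
    env.Program('app', ['main.c', 'util.cpp'])
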
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sunar.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sunar.py
new file mode 100644
index 000000000..90dd156be
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sunar.py
@@ -0,0 +1,61 @@
+"""engine.SCons.Tool.sunar
+
+Tool-specific initialization for Solaris (Forte) ar (library archive). If CC
+exists, static libraries should be built with it, so that template
+instantiations can be resolved.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sunar.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+def generate(env):
+ """Add Builders and construction variables for ar to an Environment."""
+ SCons.Tool.createStaticLibBuilder(env)
+
+ if env.Detect('CC'):
+ env['AR'] = 'CC'
+ env['ARFLAGS'] = SCons.Util.CLVar('-xar')
+ env['ARCOM'] = '$AR $ARFLAGS -o $TARGET $SOURCES'
+ else:
+ env['AR'] = 'ar'
+ env['ARFLAGS'] = SCons.Util.CLVar('r')
+ env['ARCOM'] = '$AR $ARFLAGS $TARGET $SOURCES'
+
+ env['SHLINK'] = '$LINK'
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G')
+ env['SHLINKCOM'] = '$SHLINK $SHLINKFLAGS -o $TARGET $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
+ env['LIBPREFIX'] = 'lib'
+ env['LIBSUFFIX'] = '.a'
+
+def exists(env):
+ return env.Detect('CC') or env.Detect('ar')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sunc++.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sunc++.py
new file mode 100644
index 000000000..91c0efb22
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sunc++.py
@@ -0,0 +1,100 @@
+"""SCons.Tool.sunc++
+
+Tool-specific initialization for C++ on SunOS / Solaris.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sunc++.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+import os.path
+
+cplusplus = __import__('c++', globals(), locals(), [])
+
+# use the Solaris package database tools (pkginfo/pkgchk) to figure out
+# where the C++ compiler is installed and what version of it is installed
+def get_cppc(env):
+ cxx = env.get('CXX', None)
+ if cxx:
+ cppcPath = os.path.dirname(cxx)
+ else:
+ cppcPath = None
+
+ cppcVersion = None
+
+ pkginfo = env.subst('$PKGINFO')
+ pkgchk = env.subst('$PKGCHK')
+
+ def look_pkg_db(pkginfo=pkginfo, pkgchk=pkgchk):
+ version = None
+ path = None
+ for package in ['SPROcpl']:
+ cmd = "%s -l %s 2>/dev/null | grep '^ *VERSION:'" % (pkginfo, package)
+ line = os.popen(cmd).readline()
+ if line:
+ version = line.split()[-1]
+ cmd = "%s -l %s 2>/dev/null | grep '^Pathname:.*/bin/CC$' | grep -v '/SC[0-9]*\.[0-9]*/'" % (pkgchk, package)
+ line = os.popen(cmd).readline()
+ if line:
+ path = os.path.dirname(line.split()[-1])
+ break
+
+ return path, version
+
+ path, version = look_pkg_db()
+ if path and version:
+ cppcPath, cppcVersion = path, version
+
+ return (cppcPath, 'CC', 'CC', cppcVersion)
+
+def generate(env):
+ """Add Builders and construction variables for SunPRO C++."""
+ path, cxx, shcxx, version = get_cppc(env)
+ if path:
+ cxx = os.path.join(path, cxx)
+ shcxx = os.path.join(path, shcxx)
+
+ cplusplus.generate(env)
+
+ env['CXX'] = cxx
+ env['SHCXX'] = shcxx
+ env['CXXVERSION'] = version
+ env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -KPIC')
+ env['SHOBJPREFIX'] = 'so_'
+ env['SHOBJSUFFIX'] = '.o'
+
+def exists(env):
+ path, cxx, shcxx, version = get_cppc(env)
+ if path and cxx:
+ cppc = os.path.join(path, cxx)
+ if os.path.exists(cppc):
+ return cppc
+ return None
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/suncc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/suncc.py
new file mode 100644
index 000000000..be16238d8
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/suncc.py
@@ -0,0 +1,52 @@
+"""SCons.Tool.suncc
+
+Tool-specific initialization for Sun Solaris (Forte) CC and cc.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/suncc.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+import cc
+
+def generate(env):
+ """
+ Add Builders and construction variables for Forte C and C++ compilers
+ to an Environment.
+ """
+ cc.generate(env)
+
+ env['CXX'] = 'CC'
+ env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -KPIC')
+ env['SHOBJPREFIX'] = 'so_'
+ env['SHOBJSUFFIX'] = '.o'
+
+def exists(env):
+ return env.Detect('CC')
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sunf77.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sunf77.py
new file mode 100644
index 000000000..f8be78145
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sunf77.py
@@ -0,0 +1,57 @@
+"""SCons.Tool.sunf77
+
+Tool-specific initialization for sunf77, the Sun Studio F77 compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sunf77.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+from FortranCommon import add_all_to_env
+
+compilers = ['sunf77', 'f77']
+
+def generate(env):
+ """Add Builders and construction variables for sunf77 to an Environment."""
+ add_all_to_env(env)
+
+ fcomp = env.Detect(compilers) or 'f77'
+ env['FORTRAN'] = fcomp
+ env['F77'] = fcomp
+
+ env['SHFORTRAN'] = '$FORTRAN'
+ env['SHF77'] = '$F77'
+
+ env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC')
+ env['SHF77FLAGS'] = SCons.Util.CLVar('$F77FLAGS -KPIC')
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sunf90.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sunf90.py
new file mode 100644
index 000000000..152e22d03
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sunf90.py
@@ -0,0 +1,58 @@
+"""SCons.Tool.sunf90
+
+Tool-specific initialization for sunf90, the Sun Studio F90 compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sunf90.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+from FortranCommon import add_all_to_env
+
+compilers = ['sunf90', 'f90']
+
+def generate(env):
+ """Add Builders and construction variables for sun f90 compiler to an
+ Environment."""
+ add_all_to_env(env)
+
+ fcomp = env.Detect(compilers) or 'f90'
+ env['FORTRAN'] = fcomp
+ env['F90'] = fcomp
+
+ env['SHFORTRAN'] = '$FORTRAN'
+ env['SHF90'] = '$F90'
+
+ env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC')
+ env['SHF90FLAGS'] = SCons.Util.CLVar('$F90FLAGS -KPIC')
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sunf95.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sunf95.py
new file mode 100644
index 000000000..74ea2daae
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sunf95.py
@@ -0,0 +1,58 @@
+"""SCons.Tool.sunf95
+
+Tool-specific initialization for sunf95, the Sun Studio F95 compiler.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sunf95.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Util
+
+from FortranCommon import add_all_to_env
+
+compilers = ['sunf95', 'f95']
+
+def generate(env):
+ """Add Builders and construction variables for sunf95 to an
+ Environment."""
+ add_all_to_env(env)
+
+ fcomp = env.Detect(compilers) or 'f95'
+ env['FORTRAN'] = fcomp
+ env['F95'] = fcomp
+
+ env['SHFORTRAN'] = '$FORTRAN'
+ env['SHF95'] = '$F95'
+
+ env['SHFORTRANFLAGS'] = SCons.Util.CLVar('$FORTRANFLAGS -KPIC')
+ env['SHF95FLAGS'] = SCons.Util.CLVar('$F95FLAGS -KPIC')
+
+def exists(env):
+ return env.Detect(compilers)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/sunlink.py b/tools/scons/scons-local-1.2.0/SCons/Tool/sunlink.py
new file mode 100644
index 000000000..7efa96f5f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/sunlink.py
@@ -0,0 +1,71 @@
+"""SCons.Tool.sunlink
+
+Tool-specific initialization for the Sun Solaris (Forte) linker.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/sunlink.py 3842 2008/12/20 22:59:52 scons"
+
+import os
+import os.path
+
+import SCons.Util
+
+import link
+
+ccLinker = None
+
+# search for the acc compiler and linker front end
+
+try:
+ dirs = os.listdir('/opt')
+except (IOError, OSError):
+ # Not being able to read the directory because it doesn't exist
+ # (IOError) or isn't readable (OSError) is okay.
+ dirs = []
+
+for d in dirs:
+ linker = '/opt/' + d + '/bin/CC'
+ if os.path.exists(linker):
+ ccLinker = linker
+ break
+
+def generate(env):
+ """Add Builders and construction variables for Forte to an Environment."""
+ link.generate(env)
+
+ env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G')
+
+ env.Append(LINKFLAGS=['$__RPATH'])
+ env['RPATHPREFIX'] = '-R'
+ env['RPATHSUFFIX'] = ''
+ env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
+
+def exists(env):
+ return ccLinker
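
A usage sketch (illustrative only): RPATH entries are expanded through $_RPATH into -R<dir> options, and -G is added for shared links. The tool names, library name, and path are assumptions:

    # Hypothetical SConstruct for the Sun (Forte) linker.
    env = Environment(tools=['default', 'suncc', 'sunlink'])
    env.SharedLibrary('mylib', ['mylib.c'], RPATH=['/opt/local/lib'])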
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/swig.py b/tools/scons/scons-local-1.2.0/SCons/Tool/swig.py
new file mode 100644
index 000000000..000b80e04
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/swig.py
@@ -0,0 +1,118 @@
+"""SCons.Tool.swig
+
+Tool-specific initialization for swig.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/swig.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import re
+
+import SCons.Action
+import SCons.Defaults
+import SCons.Scanner
+import SCons.Tool
+import SCons.Util
+
+SwigAction = SCons.Action.Action('$SWIGCOM', '$SWIGCOMSTR')
+
+def swigSuffixEmitter(env, source):
+ if '-c++' in SCons.Util.CLVar(env.subst("$SWIGFLAGS", source=source)):
+ return '$SWIGCXXFILESUFFIX'
+ else:
+ return '$SWIGCFILESUFFIX'
+
+# Match '%module test', as well as '%module(directors="1") test'
+_reModule = re.compile(r'%module(?:\s*\(.*\))?\s+(.+)')
+
+def _swigEmitter(target, source, env):
+ swigflags = env.subst("$SWIGFLAGS", target=target, source=source)
+ flags = SCons.Util.CLVar(swigflags)
+ for src in source:
+ src = str(src.rfile())
+ mnames = None
+ if "-python" in flags and "-noproxy" not in flags:
+ if mnames is None:
+ mnames = _reModule.findall(open(src).read())
+ target.extend(map(lambda m, d=target[0].dir:
+ d.File(m + ".py"), mnames))
+ if "-java" in flags:
+ if mnames is None:
+ mnames = _reModule.findall(open(src).read())
+ java_files = map(lambda m: [m + ".java", m + "JNI.java"], mnames)
+ java_files = SCons.Util.flatten(java_files)
+ outdir = env.subst('$SWIGOUTDIR', target=target, source=source)
+ if outdir:
+ java_files = map(lambda j, o=outdir: os.path.join(o, j), java_files)
+ java_files = map(env.fs.File, java_files)
+ for jf in java_files:
+ t_from_s = lambda t, p, s, x: t.dir
+ SCons.Util.AddMethod(jf, t_from_s, 'target_from_source')
+ target.extend(java_files)
+ return (target, source)
+
+def generate(env):
+ """Add Builders and construction variables for swig to an Environment."""
+ c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
+
+ c_file.suffix['.i'] = swigSuffixEmitter
+ cxx_file.suffix['.i'] = swigSuffixEmitter
+
+ c_file.add_action('.i', SwigAction)
+ c_file.add_emitter('.i', _swigEmitter)
+ cxx_file.add_action('.i', SwigAction)
+ cxx_file.add_emitter('.i', _swigEmitter)
+
+ java_file = SCons.Tool.CreateJavaFileBuilder(env)
+
+ java_file.suffix['.i'] = swigSuffixEmitter
+
+ java_file.add_action('.i', SwigAction)
+ java_file.add_emitter('.i', _swigEmitter)
+
+ env['SWIG'] = 'swig'
+ env['SWIGFLAGS'] = SCons.Util.CLVar('')
+ env['SWIGCFILESUFFIX'] = '_wrap$CFILESUFFIX'
+ env['SWIGCXXFILESUFFIX'] = '_wrap$CXXFILESUFFIX'
+ env['_SWIGOUTDIR'] = '${"-outdir " + str(SWIGOUTDIR)}'
+ env['SWIGPATH'] = []
+ env['SWIGINCPREFIX'] = '-I'
+ env['SWIGINCSUFFIX'] = ''
+ env['_SWIGINCFLAGS'] = '$( ${_concat(SWIGINCPREFIX, SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
+ env['SWIGCOM'] = '$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES'
+
+ expr = '^[ \t]*%[ \t]*(?:include|import|extern)[ \t]*(<|"?)([^>\s"]+)(?:>|"?)'
+ scanner = SCons.Scanner.ClassicCPP("SWIGScan", ".i", "SWIGPATH", expr)
+
+ env.Append(SCANNERS = scanner)
+
+def exists(env):
+ return env.Detect(['swig'])
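
A usage sketch (illustrative only): wrapping a C module for Python. Because -python is present without -noproxy, the emitter above also registers the generated example.py as a target. The include path is an assumption, and real builds usually also adjust prefixes and link settings for a loadable Python module:

    # Hypothetical SConstruct using the swig tool.
    env = Environment(tools=['default', 'swig'],
                      SWIGFLAGS=['-python'],
                      CPPPATH=['/usr/include/python2.5'])  # assumed location
    env.SharedLibrary('_example', ['example.i', 'example.c'])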
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/tar.py b/tools/scons/scons-local-1.2.0/SCons/Tool/tar.py
new file mode 100644
index 000000000..7d527ee61
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/tar.py
@@ -0,0 +1,67 @@
+"""SCons.Tool.tar
+
+Tool-specific initialization for tar.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/tar.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Action
+import SCons.Builder
+import SCons.Defaults
+import SCons.Node.FS
+import SCons.Util
+
+tars = ['tar', 'gtar']
+
+TarAction = SCons.Action.Action('$TARCOM', '$TARCOMSTR')
+
+TarBuilder = SCons.Builder.Builder(action = TarAction,
+ source_factory = SCons.Node.FS.Entry,
+ source_scanner = SCons.Defaults.DirScanner,
+ suffix = '$TARSUFFIX',
+ multi = 1)
+
+
+def generate(env):
+ """Add Builders and construction variables for tar to an Environment."""
+ try:
+ bld = env['BUILDERS']['Tar']
+ except KeyError:
+ bld = TarBuilder
+ env['BUILDERS']['Tar'] = bld
+
+ env['TAR'] = env.Detect(tars) or 'gtar'
+ env['TARFLAGS'] = SCons.Util.CLVar('-c')
+ env['TARCOM'] = '$TAR $TARFLAGS -f $TARGET $SOURCES'
+ env['TARSUFFIX'] = '.tar'
+
+def exists(env):
+ return env.Detect(tars)
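
A usage sketch (illustrative only); the second call assumes the detected tar understands -z for gzip compression:

    # Hypothetical SConstruct using the Tar builder defined above.
    env = Environment(tools=['default', 'tar'])
    env.Tar('dist.tar', ['src', 'README'])
    env.Tar('dist.tar.gz', ['src', 'README'],
            TARFLAGS='-c -z', TARSUFFIX='.tar.gz')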
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/tex.py b/tools/scons/scons-local-1.2.0/SCons/Tool/tex.py
new file mode 100644
index 000000000..5a664efda
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/tex.py
@@ -0,0 +1,661 @@
+"""SCons.Tool.tex
+
+Tool-specific initialization for TeX.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/tex.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import re
+import string
+import shutil
+
+import SCons.Action
+import SCons.Node
+import SCons.Node.FS
+import SCons.Util
+import SCons.Scanner.LaTeX
+
+Verbose = False
+
+must_rerun_latex = True
+
+# these are files that just need to be checked for changes; if they changed, latex must be rerun
+check_suffixes = ['.toc', '.lof', '.lot', '.out', '.nav', '.snm']
+
+# these are files that require bibtex or makeindex to be run when they change
+all_suffixes = check_suffixes + ['.bbl', '.idx', '.nlo', '.glo']
+
+#
+# regular expressions used to search for Latex features
+# or outputs that require rerunning latex
+#
+# search for all .aux files opened by latex (recorded in the .log file)
+openout_aux_re = re.compile(r"\\openout.*`(.*\.aux)'")
+
+#printindex_re = re.compile(r"^[^%]*\\printindex", re.MULTILINE)
+#printnomenclature_re = re.compile(r"^[^%]*\\printnomenclature", re.MULTILINE)
+#printglossary_re = re.compile(r"^[^%]*\\printglossary", re.MULTILINE)
+
+# search to find rerun warnings
+warning_rerun_str = '(^LaTeX Warning:.*Rerun)|(^Package \w+ Warning:.*Rerun)'
+warning_rerun_re = re.compile(warning_rerun_str, re.MULTILINE)
+
+# search to find citation rerun warnings
+rerun_citations_str = "^LaTeX Warning:.*\n.*Rerun to get citations correct"
+rerun_citations_re = re.compile(rerun_citations_str, re.MULTILINE)
+
+# search to find undefined references or citations warnings
+undefined_references_str = '(^LaTeX Warning:.*undefined references)|(^Package \w+ Warning:.*undefined citations)'
+undefined_references_re = re.compile(undefined_references_str, re.MULTILINE)
+
+# used by the emitter
+auxfile_re = re.compile(r".", re.MULTILINE)
+tableofcontents_re = re.compile(r"^[^%\n]*\\tableofcontents", re.MULTILINE)
+makeindex_re = re.compile(r"^[^%\n]*\\makeindex", re.MULTILINE)
+bibliography_re = re.compile(r"^[^%\n]*\\bibliography", re.MULTILINE)
+listoffigures_re = re.compile(r"^[^%\n]*\\listoffigures", re.MULTILINE)
+listoftables_re = re.compile(r"^[^%\n]*\\listoftables", re.MULTILINE)
+hyperref_re = re.compile(r"^[^%\n]*\\usepackage.*\{hyperref\}", re.MULTILINE)
+makenomenclature_re = re.compile(r"^[^%\n]*\\makenomenclature", re.MULTILINE)
+makeglossary_re = re.compile(r"^[^%\n]*\\makeglossary", re.MULTILINE)
+beamer_re = re.compile(r"^[^%\n]*\\documentclass\{beamer\}", re.MULTILINE)
+
+# search to find all files included by Latex
+include_re = re.compile(r'^[^%\n]*\\(?:include|input){([^}]*)}', re.MULTILINE)
+
+# search to find all graphics files included by Latex
+includegraphics_re = re.compile(r'^[^%\n]*\\(?:includegraphics(?:\[[^\]]+\])?){([^}]*)}', re.MULTILINE)
+
+# search to find all files opened by Latex (recorded in .log file)
+openout_re = re.compile(r"\\openout.*`(.*)'")
+
+# list of graphics file extensions for TeX and LaTeX
+TexGraphics = SCons.Scanner.LaTeX.TexGraphics
+LatexGraphics = SCons.Scanner.LaTeX.LatexGraphics
+
+# An Action sufficient to build any generic tex file.
+TeXAction = None
+
+# An action to build a latex file. This action might be needed more
+# than once if we are dealing with labels and bibtex.
+LaTeXAction = None
+
+# An action to run BibTeX on a file.
+BibTeXAction = None
+
+# An action to run MakeIndex on a file.
+MakeIndexAction = None
+
+# An action to run MakeIndex (for nomencl) on a file.
+MakeNclAction = None
+
+# An action to run MakeIndex (for glossary) on a file.
+MakeGlossaryAction = None
+
+# Used as a return value of modify_env_var if the variable is not set.
+_null = SCons.Scanner.LaTeX._null
+
+modify_env_var = SCons.Scanner.LaTeX.modify_env_var
+
+def FindFile(name,suffixes,paths,env,requireExt=False):
+ if requireExt:
+ name = SCons.Util.splitext(name)[0]
+ if Verbose:
+ print " searching for '%s' with extensions: " % name,suffixes
+
+ for path in paths:
+ testName = os.path.join(path,name)
+ if Verbose:
+ print " look for '%s'" % testName
+ if os.path.exists(testName):
+ if Verbose:
+ print " found '%s'" % testName
+ return env.fs.File(testName)
+ else:
+ name_ext = SCons.Util.splitext(testName)[1]
+ if name_ext:
+ continue
+
+ # if no suffix try adding those passed in
+ for suffix in suffixes:
+ testNameExt = testName + suffix
+ if Verbose:
+ print " look for '%s'" % testNameExt
+
+ if os.path.exists(testNameExt):
+ if Verbose:
+ print " found '%s'" % testNameExt
+ return env.fs.File(testNameExt)
+ if Verbose:
+ print " did not find '%s'" % name
+ return None
+
+def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None):
+ """A builder for LaTeX files that checks the output in the aux file
+ and decides how many times to use LaTeXAction, and BibTeXAction."""
+
+ global must_rerun_latex
+
+ # This routine is called with two different actions: in this file for
+ # DVI builds with LaTeXAction, and from pdflatex.py with PDFLaTeXAction.
+ # Set this up now for the case where the user requests a different
+ # extension for the target filename.
+ if (XXXLaTeXAction == LaTeXAction):
+ callerSuffix = ".dvi"
+ else:
+ callerSuffix = env['PDFSUFFIX']
+
+ basename = SCons.Util.splitext(str(source[0]))[0]
+ basedir = os.path.split(str(source[0]))[0]
+ basefile = os.path.split(str(basename))[1]
+ abspath = os.path.abspath(basedir)
+ targetext = os.path.splitext(str(target[0]))[1]
+ targetdir = os.path.split(str(target[0]))[0]
+
+ saved_env = {}
+ for var in SCons.Scanner.LaTeX.LaTeX.env_variables:
+ saved_env[var] = modify_env_var(env, var, abspath)
+
+ # Create base file names with the target directory since the auxiliary files
+ # will be made there. That's because the *COM variables have the cd
+ # command in the prolog. We check
+ # for the existence of files before opening them--even ones like the
+ # aux file that TeX always creates--to make it possible to write tests
+ # with stubs that don't necessarily generate all of the same files.
+
+ targetbase = os.path.join(targetdir, basefile)
+
+ # if there is a \makeindex there will be a .idx and thus
+ # we have to run makeindex at least once to keep the build
+ # happy even if there is no index.
+ # Same for glossaries and nomenclature
+ src_content = source[0].get_contents()
+ run_makeindex = makeindex_re.search(src_content) and not os.path.exists(targetbase + '.idx')
+ run_nomenclature = makenomenclature_re.search(src_content) and not os.path.exists(targetbase + '.nlo')
+ run_glossary = makeglossary_re.search(src_content) and not os.path.exists(targetbase + '.glo')
+
+ saved_hashes = {}
+ suffix_nodes = {}
+
+ for suffix in all_suffixes:
+ theNode = env.fs.File(targetbase + suffix)
+ suffix_nodes[suffix] = theNode
+ saved_hashes[suffix] = theNode.get_csig()
+
+ if Verbose:
+ print "hashes: ",saved_hashes
+
+ must_rerun_latex = True
+
+ #
+ # routine to update MD5 hash and compare
+ #
+ # TODO(1.5): nested scopes
+ def check_MD5(filenode, suffix, saved_hashes=saved_hashes, targetbase=targetbase):
+ global must_rerun_latex
+ # two calls to clear old csig
+ filenode.clear_memoized_values()
+ filenode.ninfo = filenode.new_ninfo()
+ new_md5 = filenode.get_csig()
+
+ if saved_hashes[suffix] == new_md5:
+ if Verbose:
+ print "file %s not changed" % (targetbase+suffix)
+ return False # unchanged
+ saved_hashes[suffix] = new_md5
+ must_rerun_latex = True
+ if Verbose:
+ print "file %s changed, rerunning Latex, new hash = " % (targetbase+suffix), new_md5
+ return True # changed
+
+ # generate the file name that latex will generate
+ resultfilename = targetbase + callerSuffix
+
+ count = 0
+
+ while (must_rerun_latex and count < int(env.subst('$LATEXRETRIES'))) :
+ result = XXXLaTeXAction(target, source, env)
+ if result != 0:
+ return result
+
+ count = count + 1
+
+ must_rerun_latex = False
+ # Decide if various things need to be run, or run again.
+
+ # Read the log file to find all .aux files
+ logfilename = targetbase + '.log'
+ logContent = ''
+ auxfiles = []
+ if os.path.exists(logfilename):
+ logContent = open(logfilename, "rb").read()
+ auxfiles = openout_aux_re.findall(logContent)
+
+ # Now decide if bibtex will need to be run.
+ # The information that bibtex reads from the .aux file is
+ # pass-independent. If we find (below) that the .bbl file is unchanged,
+ # then the last latex saw a correct bibliography.
+ # Therefore only do this on the first pass
+ if count == 1:
+ for auxfilename in auxfiles:
+ target_aux = os.path.join(targetdir, auxfilename)
+ if os.path.exists(target_aux):
+ content = open(target_aux, "rb").read()
+ if string.find(content, "bibdata") != -1:
+ if Verbose:
+ print "Need to run bibtex"
+ bibfile = env.fs.File(targetbase)
+ result = BibTeXAction(bibfile, bibfile, env)
+ if result != 0:
+ return result
+ must_rerun_latex = check_MD5(suffix_nodes['.bbl'],'.bbl')
+ break
+
+ # Now decide if latex will need to be run again due to index.
+ if check_MD5(suffix_nodes['.idx'],'.idx') or (count == 1 and run_makeindex):
+ # We must run makeindex
+ if Verbose:
+ print "Need to run makeindex"
+ idxfile = suffix_nodes['.idx']
+ result = MakeIndexAction(idxfile, idxfile, env)
+ if result != 0:
+ return result
+
+ # TO-DO: need to add a way for the user to extend this list for whatever
+ # auxiliary files they create in other (or their own) packages
+ # Harder is the case where an action needs to be called -- that should be rare (I hope?)
+
+ for index in check_suffixes:
+ check_MD5(suffix_nodes[index],index)
+
+ # Now decide if latex will need to be run again due to nomenclature.
+ if check_MD5(suffix_nodes['.nlo'],'.nlo') or (count == 1 and run_nomenclature):
+ # We must run makeindex
+ if Verbose:
+ print "Need to run makeindex for nomenclature"
+ nclfile = suffix_nodes['.nlo']
+ result = MakeNclAction(nclfile, nclfile, env)
+ if result != 0:
+ return result
+
+ # Now decide if latex will need to be run again due to glossary.
+ if check_MD5(suffix_nodes['.glo'],'.glo') or (count == 1 and run_glossary):
+ # We must run makeindex
+ if Verbose:
+ print "Need to run makeindex for glossary"
+ glofile = suffix_nodes['.glo']
+ result = MakeGlossaryAction(glofile, glofile, env)
+ if result != 0:
+ return result
+
+ # Now decide if latex needs to be run yet again to resolve warnings.
+ if warning_rerun_re.search(logContent):
+ must_rerun_latex = True
+ if Verbose:
+ print "rerun Latex due to latex or package rerun warning"
+
+ if rerun_citations_re.search(logContent):
+ must_rerun_latex = True
+ if Verbose:
+ print "rerun Latex due to 'Rerun to get citations correct' warning"
+
+ if undefined_references_re.search(logContent):
+ must_rerun_latex = True
+ if Verbose:
+ print "rerun Latex due to undefined references or citations"
+
+ if (count >= int(env.subst('$LATEXRETRIES')) and must_rerun_latex):
+ print "reached max number of retries on Latex ,",int(env.subst('$LATEXRETRIES'))
+# end of while loop
+
+ # rename Latex's output to what the target name is
+ if not (str(target[0]) == resultfilename and os.path.exists(resultfilename)):
+ if os.path.exists(resultfilename):
+ print "move %s to %s" % (resultfilename, str(target[0]), )
+ shutil.move(resultfilename,str(target[0]))
+
+ # Original comment (when TEXPICTS was not restored):
+ # The TEXPICTS environment variable is needed by a dvi -> pdf step
+ # later on Mac OSX so leave it
+ #
+ # It is also used when searching for pictures (implicit dependencies).
+ # Why not set the variable again in the respective builder instead
+ # of leaving local modifications in the environment? What if multiple
+ # latex builds in different directories need different TEXPICTS?
+ for var in SCons.Scanner.LaTeX.LaTeX.env_variables:
+ if var == 'TEXPICTS':
+ continue
+ if saved_env[var] is _null:
+ try:
+ del env['ENV'][var]
+ except KeyError:
+ pass # was never set
+ else:
+ env['ENV'][var] = saved_env[var]
+
+ return result
+
+def LaTeXAuxAction(target = None, source= None, env=None):
+ result = InternalLaTeXAuxAction( LaTeXAction, target, source, env )
+ return result
+
+LaTeX_re = re.compile("\\\\document(style|class)")
+
+def is_LaTeX(flist):
+ # Scan a file list to decide if it's TeX- or LaTeX-flavored.
+ for f in flist:
+ content = f.get_contents()
+ if LaTeX_re.search(content):
+ return 1
+ return 0
+
+def TeXLaTeXFunction(target = None, source= None, env=None):
+ """A builder for TeX and LaTeX that scans the source file to
+ decide the "flavor" of the source and then executes the appropriate
+ program."""
+ if is_LaTeX(source):
+ result = LaTeXAuxAction(target,source,env)
+ else:
+ result = TeXAction(target,source,env)
+ return result
+
+def TeXLaTeXStrFunction(target = None, source= None, env=None):
+ """A strfunction for TeX and LaTeX that scans the source file to
+ decide the "flavor" of the source and then returns the appropriate
+ command string."""
+ if env.GetOption("no_exec"):
+ if is_LaTeX(source):
+ result = env.subst('$LATEXCOM',0,target,source)+" ..."
+ else:
+ result = env.subst("$TEXCOM",0,target,source)+" ..."
+ else:
+ result = ''
+ return result
+
+def tex_eps_emitter(target, source, env):
+ """An emitter for TeX and LaTeX sources when
+ executing tex or latex. It will accept .ps and .eps
+ graphics files
+ """
+ (target, source) = tex_emitter_core(target, source, env, TexGraphics)
+
+ return (target, source)
+
+def tex_pdf_emitter(target, source, env):
+ """An emitter for TeX and LaTeX sources when
+ executing pdftex or pdflatex. It will accept graphics
+ files of types .pdf, .jpg, .png, .gif, and .tif
+ """
+ (target, source) = tex_emitter_core(target, source, env, LatexGraphics)
+
+ return (target, source)
+
+def ScanFiles(theFile, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir):
+ # for theFile (a Node) update any file_tests and search for graphics files
+ # then find all included files and call ScanFiles for each of them
+ content = theFile.get_contents()
+ if Verbose:
+ print " scanning ",str(theFile)
+
+ for i in range(len(file_tests_search)):
+ if file_tests[i][0] == None:
+ file_tests[i][0] = file_tests_search[i].search(content)
+
+ # For each file see if any graphics files are included
+ # and set up target to create .pdf graphic
+ # if this is in the pdflatex toolchain
+ graphic_files = includegraphics_re.findall(content)
+ if Verbose:
+ print "graphics files in '%s': "%str(theFile),graphic_files
+ for graphFile in graphic_files:
+ graphicNode = FindFile(graphFile,graphics_extensions,paths,env,requireExt=True)
+ # if building with pdflatex see if we need to build the .pdf version of the graphic file
+ # I should probably come up with a better way to tell which builder we are using.
+ if graphics_extensions == LatexGraphics:
+ # see if we can build this graphics file by epstopdf
+ graphicSrc = FindFile(graphFile,TexGraphics,paths,env,requireExt=True)
+ # it seems that FindFile checks with no extension added
+ # so if the extension is included in the name then both searches find it
+ # we don't want to try to build a .pdf from a .pdf so make sure src!=file wanted
+ if (graphicSrc != None) and (graphicSrc != graphicNode):
+ if Verbose:
+ if graphicNode == None:
+ print "need to build '%s' by epstopdf %s -o %s" % (graphFile,graphicSrc,graphFile)
+ else:
+ print "no need to build '%s', but source file %s exists" % (graphicNode,graphicSrc)
+ graphicNode = env.PDF(graphicSrc)
+ env.Depends(target[0],graphicNode)
+
+ # recursively call this on each of the included files
+ inc_files = [ ]
+ inc_files.extend( include_re.findall(content) )
+ if Verbose:
+ print "files included by '%s': "%str(theFile),inc_files
+ # inc_files is list of file names as given. need to find them
+ # using TEXINPUTS paths.
+
+ for src in inc_files:
+ srcNode = FindFile(src,['.tex','.ltx','.latex'],paths,env,requireExt=False)
+ if srcNode != None:
+ file_test = ScanFiles(srcNode, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir)
+ if Verbose:
+ print " done scanning ",str(theFile)
+ return file_tests
+
+def tex_emitter_core(target, source, env, graphics_extensions):
+ """An emitter for TeX and LaTeX sources.
+ For LaTeX sources we try and find the common created files that
+ are needed on subsequent runs of latex to finish tables of contents,
+ bibliographies, indices, lists of figures, and hyperlink references.
+ """
+ targetbase = SCons.Util.splitext(str(target[0]))[0]
+ basename = SCons.Util.splitext(str(source[0]))[0]
+ basefile = os.path.split(str(basename))[1]
+
+ basedir = os.path.split(str(source[0]))[0]
+ targetdir = os.path.split(str(target[0]))[0]
+ abspath = os.path.abspath(basedir)
+ target[0].attributes.path = abspath
+
+ #
+ # file names we will make use of in searching the sources and log file
+ #
+ emit_suffixes = ['.aux', '.log', '.ilg', '.blg', '.nls', '.nlg', '.gls', '.glg'] + all_suffixes
+ auxfilename = targetbase + '.aux'
+ logfilename = targetbase + '.log'
+
+ env.SideEffect(auxfilename,target[0])
+ env.SideEffect(logfilename,target[0])
+ env.Clean(target[0],auxfilename)
+ env.Clean(target[0],logfilename)
+
+ content = source[0].get_contents()
+
+ idx_exists = os.path.exists(targetbase + '.idx')
+ nlo_exists = os.path.exists(targetbase + '.nlo')
+ glo_exists = os.path.exists(targetbase + '.glo')
+
+ # set up list with the regular expressions
+ # we use to find features used
+ file_tests_search = [auxfile_re,
+ makeindex_re,
+ bibliography_re,
+ tableofcontents_re,
+ listoffigures_re,
+ listoftables_re,
+ hyperref_re,
+ makenomenclature_re,
+ makeglossary_re,
+ beamer_re ]
+ # set up list with the file suffixes that need emitting
+ # when a feature is found
+ file_tests_suff = [['.aux'],
+ ['.idx', '.ind', '.ilg'],
+ ['.bbl', '.blg'],
+ ['.toc'],
+ ['.lof'],
+ ['.lot'],
+ ['.out'],
+ ['.nlo', '.nls', '.nlg'],
+ ['.glo', '.gls', '.glg'],
+ ['.nav', '.snm', '.out', '.toc'] ]
+ # build the list of lists
+ file_tests = []
+ for i in range(len(file_tests_search)):
+ file_tests.append( [None, file_tests_suff[i]] )
+
+ # TO-DO: need to add a way for the user to extend this list for whatever
+ # auxiliary files they create in other (or their own) packages
+
+ # get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS']
+ savedpath = modify_env_var(env, 'TEXINPUTS', abspath)
+ paths = env['ENV']['TEXINPUTS']
+ if SCons.Util.is_List(paths):
+ pass
+ else:
+ # Split at os.pathsep to convert into absolute path
+ # TODO(1.5)
+ #paths = paths.split(os.pathsep)
+ paths = string.split(paths, os.pathsep)
+
+ # now that we have the path list restore the env
+ if savedpath is _null:
+ try:
+ del env['ENV']['TEXINPUTS']
+ except KeyError:
+ pass # was never set
+ else:
+ env['ENV']['TEXINPUTS'] = savedpath
+ if Verbose:
+ print "search path ",paths
+
+ file_tests = ScanFiles(source[0], target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir)
+
+ for (theSearch,suffix_list) in file_tests:
+ if theSearch:
+ for suffix in suffix_list:
+ env.SideEffect(targetbase + suffix,target[0])
+ env.Clean(target[0],targetbase + suffix)
+
+ # read log file to get all other files that latex creates and will read on the next pass
+ if os.path.exists(logfilename):
+ content = open(logfilename, "rb").read()
+ out_files = openout_re.findall(content)
+ env.SideEffect(out_files,target[0])
+ env.Clean(target[0],out_files)
+
+ return (target, source)
+
+
+TeXLaTeXAction = None
+
+def generate(env):
+ """Add Builders and construction variables for TeX to an Environment."""
+
+ # A generic tex file Action, sufficient for all tex files.
+ global TeXAction
+ if TeXAction is None:
+ TeXAction = SCons.Action.Action("$TEXCOM", "$TEXCOMSTR")
+
+ # An Action to build a latex file. This might be needed more
+ # than once if we are dealing with labels and bibtex.
+ global LaTeXAction
+ if LaTeXAction is None:
+ LaTeXAction = SCons.Action.Action("$LATEXCOM", "$LATEXCOMSTR")
+
+ # Define an action to run BibTeX on a file.
+ global BibTeXAction
+ if BibTeXAction is None:
+ BibTeXAction = SCons.Action.Action("$BIBTEXCOM", "$BIBTEXCOMSTR")
+
+ # Define an action to run MakeIndex on a file.
+ global MakeIndexAction
+ if MakeIndexAction is None:
+ MakeIndexAction = SCons.Action.Action("$MAKEINDEXCOM", "$MAKEINDEXCOMSTR")
+
+ # Define an action to run MakeIndex on a file for nomenclatures.
+ global MakeNclAction
+ if MakeNclAction is None:
+ MakeNclAction = SCons.Action.Action("$MAKENCLCOM", "$MAKENCLCOMSTR")
+
+ # Define an action to run MakeIndex on a file for glossaries.
+ global MakeGlossaryAction
+ if MakeGlossaryAction is None:
+ MakeGlossaryAction = SCons.Action.Action("$MAKEGLOSSARYCOM", "$MAKEGLOSSARYCOMSTR")
+
+ global TeXLaTeXAction
+ if TeXLaTeXAction is None:
+ TeXLaTeXAction = SCons.Action.Action(TeXLaTeXFunction,
+ strfunction=TeXLaTeXStrFunction)
+
+ import dvi
+ dvi.generate(env)
+
+ bld = env['BUILDERS']['DVI']
+ bld.add_action('.tex', TeXLaTeXAction)
+ bld.add_emitter('.tex', tex_eps_emitter)
+
+ env['TEX'] = 'tex'
+ env['TEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode')
+ env['TEXCOM'] = 'cd ${TARGET.dir} && $TEX $TEXFLAGS ${SOURCE.file}'
+
+ # Duplicate from latex.py. If latex.py goes away, then this is still OK.
+ env['LATEX'] = 'latex'
+ env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode')
+ env['LATEXCOM'] = 'cd ${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}'
+ env['LATEXRETRIES'] = 3
+
+ env['BIBTEX'] = 'bibtex'
+ env['BIBTEXFLAGS'] = SCons.Util.CLVar('')
+ env['BIBTEXCOM'] = 'cd ${TARGET.dir} && $BIBTEX $BIBTEXFLAGS ${SOURCE.filebase}'
+
+ env['MAKEINDEX'] = 'makeindex'
+ env['MAKEINDEXFLAGS'] = SCons.Util.CLVar('')
+ env['MAKEINDEXCOM'] = 'cd ${TARGET.dir} && $MAKEINDEX $MAKEINDEXFLAGS ${SOURCE.file}'
+
+ env['MAKEGLOSSARY'] = 'makeindex'
+ env['MAKEGLOSSARYSTYLE'] = '${SOURCE.filebase}.ist'
+ env['MAKEGLOSSARYFLAGS'] = SCons.Util.CLVar('-s ${MAKEGLOSSARYSTYLE} -t ${SOURCE.filebase}.glg')
+ env['MAKEGLOSSARYCOM'] = 'cd ${TARGET.dir} && $MAKEGLOSSARY ${SOURCE.filebase}.glo $MAKEGLOSSARYFLAGS -o ${SOURCE.filebase}.gls'
+
+ env['MAKENCL'] = 'makeindex'
+ env['MAKENCLSTYLE'] = '$nomencl.ist'
+ env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg'
+ env['MAKENCLCOM'] = 'cd ${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls'
+
+ # Duplicate from pdflatex.py. If latex.py goes away, then this is still OK.
+ env['PDFLATEX'] = 'pdflatex'
+ env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode')
+ env['PDFLATEXCOM'] = 'cd ${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}'
+
+def exists(env):
+ return env.Detect('tex')
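
A usage sketch (illustrative only): the DVI builder registered above scans the source for \documentclass or \documentstyle to choose between tex and latex, then reruns latex up to $LATEXRETRIES times as the aux/bbl/idx checks require. The file name is a placeholder:

    # Hypothetical SConstruct using the tex tool.
    env = Environment(tools=['default', 'tex'])
    env.DVI('report.dvi', 'report.tex')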
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/tlib.py b/tools/scons/scons-local-1.2.0/SCons/Tool/tlib.py
new file mode 100644
index 000000000..03fa99c00
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/tlib.py
@@ -0,0 +1,47 @@
+"""SCons.Tool.tlib
+
+Tool-specific initialization for the Borland tlib static library archiver.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/tlib.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Tool
+import SCons.Tool.bcc32
+import SCons.Util
+
+def generate(env):
+ """Add Builders and construction variables for tlib to an Environment."""
+ SCons.Tool.bcc32.findIt('tlib', env)
+ SCons.Tool.createStaticLibBuilder(env)
+ env['AR'] = 'tlib'
+ env['ARFLAGS'] = SCons.Util.CLVar('')
+ env['ARCOM'] = '$AR $TARGET $ARFLAGS /a $SOURCES'
+ env['LIBPREFIX'] = ''
+ env['LIBSUFFIX'] = '.lib'
+
+def exists(env):
+ return SCons.Tool.bcc32.findIt('tlib', env)
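
A usage sketch (illustrative only), assuming a Borland toolchain is installed and selected explicitly; the companion tool names and sources are placeholders:

    # Hypothetical SConstruct building a static .lib with tlib.
    env = Environment(tools=['bcc32', 'ilink32', 'tlib'])
    env.StaticLibrary('util', ['a.c', 'b.c'])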
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/wix.py b/tools/scons/scons-local-1.2.0/SCons/Tool/wix.py
new file mode 100644
index 000000000..16725e14a
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/wix.py
@@ -0,0 +1,94 @@
+"""SCons.Tool.wix
+
+Tool-specific initialization for wix, the Windows Installer XML Tool.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/wix.py 3842 2008/12/20 22:59:52 scons"
+
+import SCons.Builder
+import SCons.Action
+import os
+import string
+
+def generate(env):
+ """Add Builders and construction variables for WiX to an Environment."""
+ if not exists(env):
+ return
+
+ env['WIXCANDLEFLAGS'] = ['-nologo']
+ env['WIXCANDLEINCLUDE'] = []
+ env['WIXCANDLECOM'] = '$WIXCANDLE $WIXCANDLEFLAGS -I $WIXCANDLEINCLUDE -o ${TARGET} ${SOURCE}'
+
+ env['WIXLIGHTFLAGS'].append( '-nologo' )
+ env['WIXLIGHTCOM'] = "$WIXLIGHT $WIXLIGHTFLAGS -out ${TARGET} ${SOURCES}"
+
+ object_builder = SCons.Builder.Builder(
+ action = '$WIXCANDLECOM',
+ suffix = '.wxiobj',
+ src_suffix = '.wxs')
+
+ linker_builder = SCons.Builder.Builder(
+ action = '$WIXLIGHTCOM',
+ src_suffix = '.wxiobj',
+ src_builder = object_builder)
+
+ env['BUILDERS']['WiX'] = linker_builder
+
+def exists(env):
+ env['WIXCANDLE'] = 'candle.exe'
+ env['WIXLIGHT'] = 'light.exe'
+
+ # try to find the candle.exe and light.exe tools and
+ # add the install directory to light libpath.
+ #for path in os.environ['PATH'].split(os.pathsep):
+ for path in string.split(os.environ['PATH'], os.pathsep):
+ if not path:
+ continue
+
+ # workaround for some weird python win32 bug.
+ if path[0] == '"' and path[-1:]=='"':
+ path = path[1:-1]
+
+ # normalize the path
+ path = os.path.normpath(path)
+
+ # search for the tools in the PATH environment variable
+ try:
+ if env['WIXCANDLE'] in os.listdir(path) and\
+ env['WIXLIGHT'] in os.listdir(path):
+ env.PrependENVPath('PATH', path)
+ env['WIXLIGHTFLAGS'] = [ os.path.join( path, 'wixui.wixlib' ),
+ '-loc',
+ os.path.join( path, 'WixUI_en-us.wxl' ) ]
+ return 1
+ except OSError:
+ pass # ignore this, could be a stale PATH entry.
+
+ return None
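
A usage sketch (illustrative only): the tool only configures itself when candle.exe and light.exe are found on PATH, and the builder is registered under the name 'WiX'. The source and target names are placeholders:

    # Hypothetical SConstruct building an .msi with the WiX toolchain.
    env = Environment(tools=['default', 'wix'])
    env.WiX('setup.msi', ['product.wxs'])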
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/yacc.py b/tools/scons/scons-local-1.2.0/SCons/Tool/yacc.py
new file mode 100644
index 000000000..e06c477bd
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/yacc.py
@@ -0,0 +1,125 @@
+"""SCons.Tool.yacc
+
+Tool-specific initialization for yacc.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/yacc.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import string
+
+import SCons.Defaults
+import SCons.Tool
+import SCons.Util
+
+YaccAction = SCons.Action.Action("$YACCCOM", "$YACCCOMSTR")
+
+def _yaccEmitter(target, source, env, ysuf, hsuf):
+ yaccflags = env.subst("$YACCFLAGS", target=target, source=source)
+ flags = SCons.Util.CLVar(yaccflags)
+ targetBase, targetExt = os.path.splitext(SCons.Util.to_String(target[0]))
+
+ if '.ym' in ysuf: # If using Objective-C
+ target = [targetBase + ".m"] # the extension is ".m".
+
+
+ # If -d is specified on the command line, yacc will emit a .h
+ # or .hpp file with the same name as the .c or .cpp output file.
+ if '-d' in flags:
+ target.append(targetBase + env.subst(hsuf, target=target, source=source))
+
+ # If -g is specified on the command line, yacc will emit a .vcg
+ # file with the same base name as the .y, .yacc, .ym or .yy file.
+ if "-g" in flags:
+ base, ext = os.path.splitext(SCons.Util.to_String(source[0]))
+ target.append(base + env.subst("$YACCVCGFILESUFFIX"))
+
+ # With --defines and --graph, the name of the file is totally defined
+ # in the options.
+ fileGenOptions = ["--defines=", "--graph="]
+ for option in flags:
+ for fileGenOption in fileGenOptions:
+ l = len(fileGenOption)
+ if option[:l] == fileGenOption:
+ # A file generating option is present, so add the file
+ # name to the list of targets.
+ fileName = string.strip(option[l:])
+ target.append(fileName)
+
+ return (target, source)
+
+def yEmitter(target, source, env):
+ return _yaccEmitter(target, source, env, ['.y', '.yacc'], '$YACCHFILESUFFIX')
+
+def ymEmitter(target, source, env):
+ return _yaccEmitter(target, source, env, ['.ym'], '$YACCHFILESUFFIX')
+
+def yyEmitter(target, source, env):
+ return _yaccEmitter(target, source, env, ['.yy'], '$YACCHXXFILESUFFIX')
+
+def generate(env):
+ """Add Builders and construction variables for yacc to an Environment."""
+ c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
+
+ # C
+ c_file.add_action('.y', YaccAction)
+ c_file.add_emitter('.y', yEmitter)
+
+ c_file.add_action('.yacc', YaccAction)
+ c_file.add_emitter('.yacc', yEmitter)
+
+ # Objective-C
+ c_file.add_action('.ym', YaccAction)
+ c_file.add_emitter('.ym', ymEmitter)
+
+ # C++
+ cxx_file.add_action('.yy', YaccAction)
+ cxx_file.add_emitter('.yy', yyEmitter)
+
+ env['YACC'] = env.Detect('bison') or 'yacc'
+ env['YACCFLAGS'] = SCons.Util.CLVar('')
+ env['YACCCOM'] = '$YACC $YACCFLAGS -o $TARGET $SOURCES'
+ env['YACCHFILESUFFIX'] = '.h'
+
+ # Apparently, OS X now creates file.hpp like everybody else
+ # I have no idea when it changed; it was fixed in 10.4
+ #if env['PLATFORM'] == 'darwin':
+ # # Bison on Mac OS X just appends ".h" to the generated target .cc
+ # # or .cpp file name. Hooray for delayed expansion of variables.
+ # env['YACCHXXFILESUFFIX'] = '${TARGET.suffix}.h'
+ #else:
+ # env['YACCHXXFILESUFFIX'] = '.hpp'
+ env['YACCHXXFILESUFFIX'] = '.hpp'
+
+ env['YACCVCGFILESUFFIX'] = '.vcg'
+
+def exists(env):
+ return env.Detect(['bison', 'yacc'])
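
A usage sketch (illustrative only): with -d in YACCFLAGS the emitter above also declares the generated header (.h for C sources, .hpp for .yy sources) as a target. File names are placeholders:

    # Hypothetical SConstruct using the yacc tool.
    env = Environment(tools=['default', 'yacc'], YACCFLAGS='-d')
    env.CFile('parser.c', 'parser.y')
    env.CXXFile('parser.cc', 'parser.yy')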
diff --git a/tools/scons/scons-local-1.2.0/SCons/Tool/zip.py b/tools/scons/scons-local-1.2.0/SCons/Tool/zip.py
new file mode 100644
index 000000000..5765fefac
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Tool/zip.py
@@ -0,0 +1,94 @@
+"""SCons.Tool.zip
+
+Tool-specific initialization for zip.
+
+There normally shouldn't be any need to import this module directly.
+It will usually be imported through the generic SCons.Tool.Tool()
+selection method.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Tool/zip.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+
+import SCons.Builder
+import SCons.Defaults
+import SCons.Node.FS
+import SCons.Util
+
+try:
+ import zipfile
+ internal_zip = 1
+except ImportError:
+ internal_zip = 0
+
+if internal_zip:
+ zipcompression = zipfile.ZIP_DEFLATED
+ def zip(target, source, env):
+ def visit(arg, dirname, names):
+ for name in names:
+ path = os.path.join(dirname, name)
+ if os.path.isfile(path):
+ arg.write(path)
+ compression = env.get('ZIPCOMPRESSION', 0)
+ zf = zipfile.ZipFile(str(target[0]), 'w', compression)
+ for s in source:
+ if s.isdir():
+ os.path.walk(str(s), visit, zf)
+ else:
+ zf.write(str(s))
+ zf.close()
+else:
+ zipcompression = 0
+ zip = "$ZIP $ZIPFLAGS ${TARGET.abspath} $SOURCES"
+
+
+zipAction = SCons.Action.Action(zip, varlist=['ZIPCOMPRESSION'])
+
+ZipBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$ZIPCOM', '$ZIPCOMSTR'),
+ source_factory = SCons.Node.FS.Entry,
+ source_scanner = SCons.Defaults.DirScanner,
+ suffix = '$ZIPSUFFIX',
+ multi = 1)
+
+
+def generate(env):
+ """Add Builders and construction variables for zip to an Environment."""
+ try:
+ bld = env['BUILDERS']['Zip']
+ except KeyError:
+ bld = ZipBuilder
+ env['BUILDERS']['Zip'] = bld
+
+ env['ZIP'] = 'zip'
+ env['ZIPFLAGS'] = SCons.Util.CLVar('')
+ env['ZIPCOM'] = zipAction
+ env['ZIPCOMPRESSION'] = zipcompression
+ env['ZIPSUFFIX'] = '.zip'
+
+def exists(env):
+ return internal_zip or env.Detect('zip')
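
A usage sketch (illustrative only): the internal zipfile path is used when the module imports, otherwise the external zip command; ZIPCOMPRESSION only affects the internal path:

    # Hypothetical SConstruct using the Zip builder defined above.
    env = Environment(tools=['default', 'zip'])
    env.Zip('docs.zip', ['doc', 'README'])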
diff --git a/tools/scons/scons-local-1.2.0/SCons/Util.py b/tools/scons/scons-local-1.2.0/SCons/Util.py
new file mode 100644
index 000000000..1f33f9757
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Util.py
@@ -0,0 +1,1577 @@
+"""SCons.Util
+
+Various utility functions go here.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Util.py 3842 2008/12/20 22:59:52 scons"
+
+import copy
+import os
+import os.path
+import re
+import string
+import sys
+import types
+
+from UserDict import UserDict
+from UserList import UserList
+from UserString import UserString
+
+# Don't "from types import ..." these because we need to get at the
+# types module later to look for UnicodeType.
+DictType = types.DictType
+InstanceType = types.InstanceType
+ListType = types.ListType
+StringType = types.StringType
+TupleType = types.TupleType
+
+def dictify(keys, values, result={}):
+ for k, v in zip(keys, values):
+ result[k] = v
+ return result
+
+_altsep = os.altsep
+if _altsep is None and sys.platform == 'win32':
+ # My ActivePython 2.0.1 doesn't set os.altsep! What gives?
+ _altsep = '/'
+if _altsep:
+ def rightmost_separator(path, sep, _altsep=_altsep):
+ rfind = string.rfind
+ return max(rfind(path, sep), rfind(path, _altsep))
+else:
+ rightmost_separator = string.rfind
+
+# First two from the Python Cookbook, just for completeness.
+# (Yeah, yeah, YAGNI...)
+def containsAny(str, set):
+ """Check whether sequence str contains ANY of the items in set."""
+ for c in set:
+ if c in str: return 1
+ return 0
+
+def containsAll(str, set):
+ """Check whether sequence str contains ALL of the items in set."""
+ for c in set:
+ if c not in str: return 0
+ return 1
+
+def containsOnly(str, set):
+ """Check whether sequence str contains ONLY items in set."""
+ for c in str:
+ if c not in set: return 0
+ return 1
+
+def splitext(path):
+ "Same as os.path.splitext() but faster."
+ sep = rightmost_separator(path, os.sep)
+ dot = string.rfind(path, '.')
+ # An ext is only real if it has at least one non-digit char
+ if dot > sep and not containsOnly(path[dot:], "0123456789."):
+ return path[:dot],path[dot:]
+ else:
+ return path,""
+
+def updrive(path):
+ """
+ Make the drive letter (if any) upper case.
+ This is useful because Windows is inconsistent on the case
+ of the drive letter, which can cause inconsistencies when
+ calculating command signatures.
+ """
+ drive, rest = os.path.splitdrive(path)
+ if drive:
+ path = string.upper(drive) + rest
+ return path
+
+class CallableComposite(UserList):
+ """A simple composite callable class that, when called, will invoke all
+ of its contained callables with the same arguments."""
+ def __call__(self, *args, **kwargs):
+ retvals = map(lambda x, args=args, kwargs=kwargs: apply(x,
+ args,
+ kwargs),
+ self.data)
+ if self.data and (len(self.data) == len(filter(callable, retvals))):
+ return self.__class__(retvals)
+ return NodeList(retvals)
+
+class NodeList(UserList):
+ """This class is almost exactly like a regular list of Nodes
+ (actually it can hold any object), with one important difference.
+ If you try to get an attribute from this list, it will return that
+ attribute from every item in the list. For example:
+
+ >>> someList = NodeList([ ' foo ', ' bar ' ])
+ >>> someList.strip()
+ [ 'foo', 'bar' ]
+ """
+ def __nonzero__(self):
+ return len(self.data) != 0
+
+ def __str__(self):
+ return string.join(map(str, self.data))
+
+ def __getattr__(self, name):
+ if not self.data:
+ # If there is nothing in the list, then we have no attributes to
+ # pass through, so raise AttributeError for everything.
+ raise AttributeError, "NodeList has no attribute: %s" % name
+
+ # Return a list of the attribute, gotten from every element
+ # in the list
+ attrList = map(lambda x, n=name: getattr(x, n), self.data)
+
+ # Special case. If the attribute is callable, we do not want
+ # to return a list of callables. Rather, we want to return a
+ # single callable that, when called, will invoke the function on
+ # all elements of this list.
+ if self.data and (len(self.data) == len(filter(callable, attrList))):
+ return CallableComposite(attrList)
+ return self.__class__(attrList)
+
+_get_env_var = re.compile(r'^\$([_a-zA-Z]\w*|{[_a-zA-Z]\w*})$')
+
+def get_environment_var(varstr):
+ """Given a string, first determine if it looks like a reference
+ to a single environment variable, like "$FOO" or "${FOO}".
+ If so, return that variable with no decorations ("FOO").
+ If not, return None."""
+ mo=_get_env_var.match(to_String(varstr))
+ if mo:
+ var = mo.group(1)
+ if var[0] == '{':
+ return var[1:-1]
+ else:
+ return var
+ else:
+ return None
+
+class DisplayEngine:
+ def __init__(self):
+ self.__call__ = self.print_it
+
+ def print_it(self, text, append_newline=1):
+ if append_newline: text = text + '\n'
+ try:
+ sys.stdout.write(text)
+ except IOError:
+ # Stdout might be connected to a pipe that has been closed
+ # by now. The most likely reason for the pipe being closed
+ # is that the user has pressed ctrl-c. If this is the case,
+ # then SCons is currently shutting down. We therefore ignore
+ # IOErrors here so that SCons can continue and shut down
+ # properly so that the .sconsign is correctly written
+ # before SCons exits.
+ pass
+
+ def dont_print(self, text, append_newline=1):
+ pass
+
+ def set_mode(self, mode):
+ if mode:
+ self.__call__ = self.print_it
+ else:
+ self.__call__ = self.dont_print
+
+def render_tree(root, child_func, prune=0, margin=[0], visited={}):
+ """
+ Render a tree of nodes into an ASCII tree view.
+ root - the root node of the tree
+ child_func - the function called to get the children of a node
+ prune - don't visit the same node twice
+ margin - the format of the left margin to use for children of root.
+ 1 results in a pipe, and 0 results in no pipe.
+ visited - a dictionary of visited nodes in the current branch if not prune,
+ or in the whole tree if prune.
+ """
+
+ rname = str(root)
+
+ children = child_func(root)
+ retval = ""
+ for pipe in margin[:-1]:
+ if pipe:
+ retval = retval + "| "
+ else:
+ retval = retval + " "
+
+ if visited.has_key(rname):
+ return retval + "+-[" + rname + "]\n"
+
+ retval = retval + "+-" + rname + "\n"
+ if not prune:
+ visited = copy.copy(visited)
+ visited[rname] = 1
+
+ for i in range(len(children)):
+ margin.append(i<len(children)-1)
+ retval = retval + render_tree(children[i], child_func, prune, margin, visited
+)
+ margin.pop()
+
+ return retval
+
+IDX = lambda N: N and 1 or 0
+
+def print_tree(root, child_func, prune=0, showtags=0, margin=[0], visited={}):
+ """
+ Print a tree of nodes. This is like render_tree, except it prints
+ lines directly instead of creating a string representation in memory,
+ so that huge trees can be printed.
+
+ root - the root node of the tree
+ child_func - the function called to get the children of a node
+ prune - don't visit the same node twice
+ showtags - print status information to the left of each node line
+ margin - the format of the left margin to use for children of root.
+ 1 results in a pipe, and 0 results in no pipe.
+ visited - a dictionary of visited nodes in the current branch if not prune,
+ or in the whole tree if prune.
+ """
+
+ rname = str(root)
+
+ if showtags:
+
+ if showtags == 2:
+ print ' E = exists'
+ print ' R = exists in repository only'
+ print ' b = implicit builder'
+ print ' B = explicit builder'
+ print ' S = side effect'
+ print ' P = precious'
+ print ' A = always build'
+ print ' C = current'
+ print ' N = no clean'
+ print ' H = no cache'
+ print ''
+
+ tags = ['[']
+ tags.append(' E'[IDX(root.exists())])
+ tags.append(' R'[IDX(root.rexists() and not root.exists())])
+ tags.append(' BbB'[[0,1][IDX(root.has_explicit_builder())] +
+ [0,2][IDX(root.has_builder())]])
+ tags.append(' S'[IDX(root.side_effect)])
+ tags.append(' P'[IDX(root.precious)])
+ tags.append(' A'[IDX(root.always_build)])
+ tags.append(' C'[IDX(root.is_up_to_date())])
+ tags.append(' N'[IDX(root.noclean)])
+ tags.append(' H'[IDX(root.nocache)])
+ tags.append(']')
+
+ else:
+ tags = []
+
+ def MMM(m):
+ return [" ","| "][m]
+ margins = map(MMM, margin[:-1])
+
+ children = child_func(root)
+
+ if prune and visited.has_key(rname) and children:
+ print string.join(tags + margins + ['+-[', rname, ']'], '')
+ return
+
+ print string.join(tags + margins + ['+-', rname], '')
+
+ visited[rname] = 1
+
+ if children:
+ margin.append(1)
+ map(lambda C, cf=child_func, p=prune, i=IDX(showtags), m=margin, v=visited:
+ print_tree(C, cf, p, i, m, v),
+ children[:-1])
+ margin[-1] = 0
+ print_tree(children[-1], child_func, prune, IDX(showtags), margin, visited)
+ margin.pop()
+
+
+
+# Functions for deciding if things are like various types, mainly to
+# handle UserDict, UserList and UserString like their underlying types.
+#
+# Yes, all of this manual testing breaks polymorphism, and the real
+# Pythonic way to do all of this would be to just try it and handle the
+# exception, but handling the exception when it's not the right type is
+# often too slow.
+
+try:
+ class mystr(str):
+ pass
+except TypeError:
+ # An older Python version without new-style classes.
+ #
+ # The actual implementations here have been selected after timings
+ # coded up in bench/is_types.py (from the SCons source tree,
+ # see the scons-src distribution), mostly against Python 1.5.2.
+ # Key results from those timings:
+ #
+ # -- Storing the type of the object in a variable (t = type(obj))
+ # slows down the case where it's a native type and the first
+ # comparison will match, but nicely speeds up the case where
+ # it's a different native type. Since that's going to be
+ # common, it's a good tradeoff.
+ #
+ # -- The data show that calling isinstance() on an object that's
+ # a native type (dict, list or string) is expensive enough
+ # that checking up front for whether the object is of type
+ # InstanceType is a pretty big win, even though it does slow
+ # down the case where it really *is* an object instance a
+ # little bit.
+ def is_Dict(obj):
+ t = type(obj)
+ return t is DictType or \
+ (t is InstanceType and isinstance(obj, UserDict))
+
+ def is_List(obj):
+ t = type(obj)
+ return t is ListType \
+ or (t is InstanceType and isinstance(obj, UserList))
+
+ def is_Sequence(obj):
+ t = type(obj)
+ return t is ListType \
+ or t is TupleType \
+ or (t is InstanceType and isinstance(obj, UserList))
+
+ def is_Tuple(obj):
+ t = type(obj)
+ return t is TupleType
+
+ if hasattr(types, 'UnicodeType'):
+ def is_String(obj):
+ t = type(obj)
+ return t is StringType \
+ or t is UnicodeType \
+ or (t is InstanceType and isinstance(obj, UserString))
+ else:
+ def is_String(obj):
+ t = type(obj)
+ return t is StringType \
+ or (t is InstanceType and isinstance(obj, UserString))
+
+ def is_Scalar(obj):
+ return is_String(obj) or not is_Sequence(obj)
+
+ def flatten(obj, result=None):
+ """Flatten a sequence to a non-nested list.
+
+ Flatten() converts either a single scalar or a nested sequence
+ to a non-nested list. Note that flatten() considers strings
+ to be scalars instead of sequences like Python would.
+ """
+ if is_Scalar(obj):
+ return [obj]
+ if result is None:
+ result = []
+ for item in obj:
+ if is_Scalar(item):
+ result.append(item)
+ else:
+ flatten_sequence(item, result)
+ return result
+
+ def flatten_sequence(sequence, result=None):
+ """Flatten a sequence to a non-nested list.
+
+ Same as flatten(), but it does not handle the single scalar
+ case. This is slightly more efficient when one knows that
+ the sequence to flatten can not be a scalar.
+ """
+ if result is None:
+ result = []
+ for item in sequence:
+ if is_Scalar(item):
+ result.append(item)
+ else:
+ flatten_sequence(item, result)
+ return result
+
+ #
+ # Generic convert-to-string functions that abstract away whether or
+ # not the Python we're executing has Unicode support. The wrapper
+ # to_String_for_signature() will use a for_signature() method if the
+ # specified object has one.
+ #
+ if hasattr(types, 'UnicodeType'):
+ UnicodeType = types.UnicodeType
+ def to_String(s):
+ if isinstance(s, UserString):
+ t = type(s.data)
+ else:
+ t = type(s)
+ if t is UnicodeType:
+ return unicode(s)
+ else:
+ return str(s)
+ else:
+ to_String = str
+
+ def to_String_for_signature(obj):
+ try:
+ f = obj.for_signature
+ except AttributeError:
+ return to_String_for_subst(obj)
+ else:
+ return f()
+
+ def to_String_for_subst(s):
+ if is_Sequence( s ):
+ return string.join( map(to_String_for_subst, s) )
+
+ return to_String( s )
+
+else:
+ # A modern Python version with new-style classes, so we can just use
+ # isinstance().
+ #
+ # We are using the following trick to speed-up these
+ # functions. Default arguments are used to take a snapshot of the
+    # global functions and constants used by these functions. This
+    # transforms accesses to global variables into local variable
+    # accesses (i.e. LOAD_FAST instead of LOAD_GLOBAL).
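+    #
+    # As a minimal illustrative sketch (not part of the SCons sources), the
+    # same trick applied to a hypothetical helper looks like this; both
+    # names used in the body are resolved as fast default-argument bindings:
+    #
+    #     >>> def is_str(obj, isinstance=isinstance, str=str):
+    #     ...     return isinstance(obj, str)
+    #     >>> is_str('x'), is_str(0)
+    #     (True, False)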
+
+ DictTypes = (dict, UserDict)
+ ListTypes = (list, UserList)
+ SequenceTypes = (list, tuple, UserList)
+
+ # Empirically, Python versions with new-style classes all have
+ # unicode.
+ #
+ # Note that profiling data shows a speed-up when comparing
+    # explicitly with str and unicode instead of simply comparing
+    # with basestring (at least on Python 2.5.1).
+ StringTypes = (str, unicode, UserString)
+
+    # Empirically, it is faster to check explicitly for str and
+ # unicode than for basestring.
+ BaseStringTypes = (str, unicode)
+
+ def is_Dict(obj, isinstance=isinstance, DictTypes=DictTypes):
+ return isinstance(obj, DictTypes)
+
+ def is_List(obj, isinstance=isinstance, ListTypes=ListTypes):
+ return isinstance(obj, ListTypes)
+
+ def is_Sequence(obj, isinstance=isinstance, SequenceTypes=SequenceTypes):
+ return isinstance(obj, SequenceTypes)
+
+ def is_Tuple(obj, isinstance=isinstance, tuple=tuple):
+ return isinstance(obj, tuple)
+
+ def is_String(obj, isinstance=isinstance, StringTypes=StringTypes):
+ return isinstance(obj, StringTypes)
+
+ def is_Scalar(obj, isinstance=isinstance, StringTypes=StringTypes, SequenceTypes=SequenceTypes):
+        # Profiling shows that there is an impressive speed-up of 2x
+        # when explicitly checking for strings instead of just "not
+        # sequence" when the argument (i.e. obj) is already a string.
+        # But, if obj is not a string then it is twice as fast to
+        # check only for 'not sequence'. The following code therefore
+        # assumes that the obj argument is a string most of the time.
+ return isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes)
+
+ def do_flatten(sequence, result, isinstance=isinstance,
+ StringTypes=StringTypes, SequenceTypes=SequenceTypes):
+ for item in sequence:
+ if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
+ result.append(item)
+ else:
+ do_flatten(item, result)
+
+ def flatten(obj, isinstance=isinstance, StringTypes=StringTypes,
+ SequenceTypes=SequenceTypes, do_flatten=do_flatten):
+ """Flatten a sequence to a non-nested list.
+
+ Flatten() converts either a single scalar or a nested sequence
+ to a non-nested list. Note that flatten() considers strings
+ to be scalars instead of sequences like Python would.
+ """
+ if isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes):
+ return [obj]
+ result = []
+ for item in obj:
+ if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
+ result.append(item)
+ else:
+ do_flatten(item, result)
+ return result
+
+ def flatten_sequence(sequence, isinstance=isinstance, StringTypes=StringTypes,
+ SequenceTypes=SequenceTypes, do_flatten=do_flatten):
+ """Flatten a sequence to a non-nested list.
+
+ Same as flatten(), but it does not handle the single scalar
+ case. This is slightly more efficient when one knows that
+ the sequence to flatten can not be a scalar.
+ """
+ result = []
+ for item in sequence:
+ if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
+ result.append(item)
+ else:
+ do_flatten(item, result)
+ return result
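+
+    # Illustrative sketch (not part of the SCons sources): flatten() treats
+    # strings as scalars, so nested lists and tuples collapse while the
+    # strings themselves stay intact:
+    #
+    #     >>> flatten(['a', ['b', ('c', 'd')], 'ef'])
+    #     ['a', 'b', 'c', 'd', 'ef']
+    #     >>> flatten('ab')       # a single scalar comes back wrapped in a list
+    #     ['ab']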
+
+
+ #
+ # Generic convert-to-string functions that abstract away whether or
+ # not the Python we're executing has Unicode support. The wrapper
+ # to_String_for_signature() will use a for_signature() method if the
+ # specified object has one.
+ #
+ def to_String(s,
+ isinstance=isinstance, str=str,
+ UserString=UserString, BaseStringTypes=BaseStringTypes):
+ if isinstance(s,BaseStringTypes):
+ # Early out when already a string!
+ return s
+ elif isinstance(s, UserString):
+ # s.data can only be either a unicode or a regular
+ # string. Please see the UserString initializer.
+ return s.data
+ else:
+ return str(s)
+
+ def to_String_for_subst(s,
+ isinstance=isinstance, join=string.join, str=str, to_String=to_String,
+ BaseStringTypes=BaseStringTypes, SequenceTypes=SequenceTypes,
+ UserString=UserString):
+
+ # Note that the test cases are sorted by order of probability.
+ if isinstance(s, BaseStringTypes):
+ return s
+ elif isinstance(s, SequenceTypes):
+ l = []
+ for e in s:
+ l.append(to_String_for_subst(e))
+            return join( l )
+ elif isinstance(s, UserString):
+ # s.data can only be either a unicode or a regular
+ # string. Please see the UserString initializer.
+ return s.data
+ else:
+ return str(s)
+
+ def to_String_for_signature(obj, to_String_for_subst=to_String_for_subst,
+ AttributeError=AttributeError):
+ try:
+ f = obj.for_signature
+ except AttributeError:
+ return to_String_for_subst(obj)
+ else:
+ return f()
+
+
+
+# The SCons "semi-deep" copy.
+#
+# This makes separate copies of lists (including UserList objects)
+# dictionaries (including UserDict objects) and tuples, but just copies
+# references to anything else it finds.
+#
+# A special case is any object that has a __semi_deepcopy__() method,
+# which we invoke to create the copy. This is used by the BuilderDict
+# class because of its extra initialization argument.
+#
+# The dispatch table approach used here is a direct rip-off from the
+# normal Python copy module.
+
+_semi_deepcopy_dispatch = d = {}
+
+def _semi_deepcopy_dict(x):
+ copy = {}
+ for key, val in x.items():
+ # The regular Python copy.deepcopy() also deepcopies the key,
+ # as follows:
+ #
+ # copy[semi_deepcopy(key)] = semi_deepcopy(val)
+ #
+ # Doesn't seem like we need to, but we'll comment it just in case.
+ copy[key] = semi_deepcopy(val)
+ return copy
+d[types.DictionaryType] = _semi_deepcopy_dict
+
+def _semi_deepcopy_list(x):
+ return map(semi_deepcopy, x)
+d[types.ListType] = _semi_deepcopy_list
+
+def _semi_deepcopy_tuple(x):
+ return tuple(map(semi_deepcopy, x))
+d[types.TupleType] = _semi_deepcopy_tuple
+
+def _semi_deepcopy_inst(x):
+ if hasattr(x, '__semi_deepcopy__'):
+ return x.__semi_deepcopy__()
+ elif isinstance(x, UserDict):
+ return x.__class__(_semi_deepcopy_dict(x))
+ elif isinstance(x, UserList):
+ return x.__class__(_semi_deepcopy_list(x))
+ else:
+ return x
+d[types.InstanceType] = _semi_deepcopy_inst
+
+def semi_deepcopy(x):
+ copier = _semi_deepcopy_dispatch.get(type(x))
+ if copier:
+ return copier(x)
+ else:
+ return x
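+
+# Illustrative sketch (not part of the SCons sources): lists, dictionaries
+# and tuples are copied recursively, while any other object (here the os
+# module, standing in for an arbitrary object) is shared by reference:
+#
+#     >>> orig = {'libs': ['a', 'b'], 'env': os}
+#     >>> dup = semi_deepcopy(orig)
+#     >>> dup['libs'].append('c')
+#     >>> orig['libs']                 # the nested list was copied...
+#     ['a', 'b']
+#     >>> dup['env'] is orig['env']    # ...but the object is shared
+#     True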
+
+
+
+class Proxy:
+ """A simple generic Proxy class, forwarding all calls to
+    subject. So, for the benefit of the Python newbie, what does
+ this really mean? Well, it means that you can take an object, let's
+ call it 'objA', and wrap it in this Proxy class, with a statement
+ like this
+
+ proxyObj = Proxy(objA),
+
+ Then, if in the future, you do something like this
+
+ x = proxyObj.var1,
+
+ since Proxy does not have a 'var1' attribute (but presumably objA does),
+ the request actually is equivalent to saying
+
+ x = objA.var1
+
+ Inherit from this class to create a Proxy."""
+
+ def __init__(self, subject):
+ """Wrap an object as a Proxy object"""
+ self.__subject = subject
+
+ def __getattr__(self, name):
+ """Retrieve an attribute from the wrapped object. If the named
+ attribute doesn't exist, AttributeError is raised"""
+ return getattr(self.__subject, name)
+
+ def get(self):
+ """Retrieve the entire wrapped object"""
+ return self.__subject
+
+ def __cmp__(self, other):
+ if issubclass(other.__class__, self.__subject.__class__):
+ return cmp(self.__subject, other)
+ return cmp(self.__dict__, other.__dict__)
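+
+# Illustrative sketch (not part of the SCons sources): a hypothetical
+# subclass only has to hand the wrapped object to Proxy.__init__; any
+# attribute the proxy itself lacks is forwarded to the subject:
+#
+#     >>> class StringProxy(Proxy):
+#     ...     pass
+#     >>> p = StringProxy('abc')
+#     >>> p.upper()                    # forwarded to the wrapped str
+#     'ABC'
+#     >>> p.get()
+#     'abc'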
+
+# attempt to load the windows registry module:
+can_read_reg = 0
+try:
+ import _winreg
+
+ can_read_reg = 1
+ hkey_mod = _winreg
+
+ RegOpenKeyEx = _winreg.OpenKeyEx
+ RegEnumKey = _winreg.EnumKey
+ RegEnumValue = _winreg.EnumValue
+ RegQueryValueEx = _winreg.QueryValueEx
+ RegError = _winreg.error
+
+except ImportError:
+ try:
+ import win32api
+ import win32con
+ can_read_reg = 1
+ hkey_mod = win32con
+
+ RegOpenKeyEx = win32api.RegOpenKeyEx
+ RegEnumKey = win32api.RegEnumKey
+ RegEnumValue = win32api.RegEnumValue
+ RegQueryValueEx = win32api.RegQueryValueEx
+ RegError = win32api.error
+
+ except ImportError:
+ class _NoError(Exception):
+ pass
+ RegError = _NoError
+
+if can_read_reg:
+ HKEY_CLASSES_ROOT = hkey_mod.HKEY_CLASSES_ROOT
+ HKEY_LOCAL_MACHINE = hkey_mod.HKEY_LOCAL_MACHINE
+ HKEY_CURRENT_USER = hkey_mod.HKEY_CURRENT_USER
+ HKEY_USERS = hkey_mod.HKEY_USERS
+
+ def RegGetValue(root, key):
+ """This utility function returns a value in the registry
+ without having to open the key first. Only available on
+ Windows platforms with a version of Python that can read the
+ registry. Returns the same thing as
+ SCons.Util.RegQueryValueEx, except you just specify the entire
+ path to the value, and don't have to bother opening the key
+ first. So:
+
+ Instead of:
+ k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
+ r'SOFTWARE\Microsoft\Windows\CurrentVersion')
+ out = SCons.Util.RegQueryValueEx(k,
+ 'ProgramFilesDir')
+
+ You can write:
+ out = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE,
+ r'SOFTWARE\Microsoft\Windows\CurrentVersion\ProgramFilesDir')
+ """
+ # I would use os.path.split here, but it's not a filesystem
+ # path...
+ p = key.rfind('\\') + 1
+ keyp = key[:p]
+ val = key[p:]
+ k = RegOpenKeyEx(root, keyp)
+ return RegQueryValueEx(k,val)
+
+if sys.platform == 'win32':
+
+ def WhereIs(file, path=None, pathext=None, reject=[]):
+ if path is None:
+ try:
+ path = os.environ['PATH']
+ except KeyError:
+ return None
+ if is_String(path):
+ path = string.split(path, os.pathsep)
+ if pathext is None:
+ try:
+ pathext = os.environ['PATHEXT']
+ except KeyError:
+ pathext = '.COM;.EXE;.BAT;.CMD'
+ if is_String(pathext):
+ pathext = string.split(pathext, os.pathsep)
+ for ext in pathext:
+ if string.lower(ext) == string.lower(file[-len(ext):]):
+ pathext = ['']
+ break
+ if not is_List(reject) and not is_Tuple(reject):
+ reject = [reject]
+ for dir in path:
+ f = os.path.join(dir, file)
+ for ext in pathext:
+ fext = f + ext
+ if os.path.isfile(fext):
+ try:
+ reject.index(fext)
+ except ValueError:
+ return os.path.normpath(fext)
+ continue
+ return None
+
+elif os.name == 'os2':
+
+ def WhereIs(file, path=None, pathext=None, reject=[]):
+ if path is None:
+ try:
+ path = os.environ['PATH']
+ except KeyError:
+ return None
+ if is_String(path):
+ path = string.split(path, os.pathsep)
+ if pathext is None:
+ pathext = ['.exe', '.cmd']
+ for ext in pathext:
+ if string.lower(ext) == string.lower(file[-len(ext):]):
+ pathext = ['']
+ break
+ if not is_List(reject) and not is_Tuple(reject):
+ reject = [reject]
+ for dir in path:
+ f = os.path.join(dir, file)
+ for ext in pathext:
+ fext = f + ext
+ if os.path.isfile(fext):
+ try:
+ reject.index(fext)
+ except ValueError:
+ return os.path.normpath(fext)
+ continue
+ return None
+
+else:
+
+ def WhereIs(file, path=None, pathext=None, reject=[]):
+ import stat
+ if path is None:
+ try:
+ path = os.environ['PATH']
+ except KeyError:
+ return None
+ if is_String(path):
+ path = string.split(path, os.pathsep)
+ if not is_List(reject) and not is_Tuple(reject):
+ reject = [reject]
+ for d in path:
+ f = os.path.join(d, file)
+ if os.path.isfile(f):
+ try:
+ st = os.stat(f)
+ except OSError:
+ # os.stat() raises OSError, not IOError if the file
+ # doesn't exist, so in this case we let IOError get
+ # raised so as to not mask possibly serious disk or
+ # network issues.
+ continue
+ if stat.S_IMODE(st[stat.ST_MODE]) & 0111:
+ try:
+ reject.index(f)
+ except ValueError:
+ return os.path.normpath(f)
+ continue
+ return None
+
+def PrependPath(oldpath, newpath, sep = os.pathsep, delete_existing=1):
+ """This prepends newpath elements to the given oldpath. Will only
+ add any particular path once (leaving the first one it encounters
+ and ignoring the rest, to preserve path order), and will
+ os.path.normpath and os.path.normcase all paths to help assure
+ this. This can also handle the case where the given old path
+ variable is a list instead of a string, in which case a list will
+ be returned instead of a string.
+
+ Example:
+ Old Path: "/foo/bar:/foo"
+ New Path: "/biz/boom:/foo"
+ Result: "/biz/boom:/foo:/foo/bar"
+
+ If delete_existing is 0, then adding a path that exists will
+ not move it to the beginning; it will stay where it is in the
+ list.
+ """
+
+ orig = oldpath
+ is_list = 1
+ paths = orig
+ if not is_List(orig) and not is_Tuple(orig):
+ paths = string.split(paths, sep)
+ is_list = 0
+
+ if is_List(newpath) or is_Tuple(newpath):
+ newpaths = newpath
+ else:
+ newpaths = string.split(newpath, sep)
+
+ if not delete_existing:
+ # First uniquify the old paths, making sure to
+ # preserve the first instance (in Unix/Linux,
+ # the first one wins), and remembering them in normpaths.
+ # Then insert the new paths at the head of the list
+ # if they're not already in the normpaths list.
+ result = []
+ normpaths = []
+ for path in paths:
+ if not path:
+ continue
+ normpath = os.path.normpath(os.path.normcase(path))
+ if normpath not in normpaths:
+ result.append(path)
+ normpaths.append(normpath)
+ newpaths.reverse() # since we're inserting at the head
+ for path in newpaths:
+ if not path:
+ continue
+ normpath = os.path.normpath(os.path.normcase(path))
+ if normpath not in normpaths:
+ result.insert(0, path)
+ normpaths.append(normpath)
+ paths = result
+
+ else:
+ newpaths = newpaths + paths # prepend new paths
+
+ normpaths = []
+ paths = []
+ # now we add them only if they are unique
+ for path in newpaths:
+ normpath = os.path.normpath(os.path.normcase(path))
+ if path and not normpath in normpaths:
+ paths.append(path)
+ normpaths.append(normpath)
+
+ if is_list:
+ return paths
+ else:
+ return string.join(paths, sep)
+
+def AppendPath(oldpath, newpath, sep = os.pathsep, delete_existing=1):
+ """This appends new path elements to the given old path. Will
+ only add any particular path once (leaving the last one it
+ encounters and ignoring the rest, to preserve path order), and
+ will os.path.normpath and os.path.normcase all paths to help
+ assure this. This can also handle the case where the given old
+ path variable is a list instead of a string, in which case a list
+ will be returned instead of a string.
+
+ Example:
+ Old Path: "/foo/bar:/foo"
+ New Path: "/biz/boom:/foo"
+ Result: "/foo/bar:/biz/boom:/foo"
+
+ If delete_existing is 0, then adding a path that exists
+ will not move it to the end; it will stay where it is in the list.
+ """
+
+ orig = oldpath
+ is_list = 1
+ paths = orig
+ if not is_List(orig) and not is_Tuple(orig):
+ paths = string.split(paths, sep)
+ is_list = 0
+
+ if is_List(newpath) or is_Tuple(newpath):
+ newpaths = newpath
+ else:
+ newpaths = string.split(newpath, sep)
+
+ if not delete_existing:
+ # add old paths to result, then
+ # add new paths if not already present
+ # (I thought about using a dict for normpaths for speed,
+ # but it's not clear hashing the strings would be faster
+ # than linear searching these typically short lists.)
+ result = []
+ normpaths = []
+ for path in paths:
+ if not path:
+ continue
+ result.append(path)
+ normpaths.append(os.path.normpath(os.path.normcase(path)))
+ for path in newpaths:
+ if not path:
+ continue
+ normpath = os.path.normpath(os.path.normcase(path))
+ if normpath not in normpaths:
+ result.append(path)
+ normpaths.append(normpath)
+ paths = result
+ else:
+ # start w/ new paths, add old ones if not present,
+ # then reverse.
+ newpaths = paths + newpaths # append new paths
+ newpaths.reverse()
+
+ normpaths = []
+ paths = []
+ # now we add them only if they are unique
+ for path in newpaths:
+ normpath = os.path.normpath(os.path.normcase(path))
+ if path and not normpath in normpaths:
+ paths.append(path)
+ normpaths.append(normpath)
+ paths.reverse()
+
+ if is_list:
+ return paths
+ else:
+ return string.join(paths, sep)
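+
+# Illustrative sketch (not part of the SCons sources): the docstrings above
+# show the default behaviour; with delete_existing=0 an already-present
+# entry keeps its original position instead of being moved:
+#
+#     >>> PrependPath('/foo/bar:/foo', '/biz/boom:/foo', sep=':', delete_existing=0)
+#     '/biz/boom:/foo/bar:/foo'
+#     >>> AppendPath('/foo/bar:/foo', '/biz/boom:/foo', sep=':', delete_existing=0)
+#     '/foo/bar:/foo:/biz/boom'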
+
+if sys.platform == 'cygwin':
+ def get_native_path(path):
+ """Transforms an absolute path into a native path for the system. In
+ Cygwin, this converts from a Cygwin path to a Windows one."""
+ return string.replace(os.popen('cygpath -w ' + path).read(), '\n', '')
+else:
+ def get_native_path(path):
+ """Transforms an absolute path into a native path for the system.
+ Non-Cygwin version, just leave the path alone."""
+ return path
+
+display = DisplayEngine()
+
+def Split(arg):
+ if is_List(arg) or is_Tuple(arg):
+ return arg
+ elif is_String(arg):
+ return string.split(arg)
+ else:
+ return [arg]
+
+class CLVar(UserList):
+ """A class for command-line construction variables.
+
+ This is a list that uses Split() to split an initial string along
+ white-space arguments, and similarly to split any strings that get
+ added. This allows us to Do the Right Thing with Append() and
+ Prepend() (as well as straight Python foo = env['VAR'] + 'arg1
+ arg2') regardless of whether a user adds a list or a string to a
+ command-line construction variable.
+ """
+ def __init__(self, seq = []):
+ UserList.__init__(self, Split(seq))
+ def __add__(self, other):
+ return UserList.__add__(self, CLVar(other))
+ def __radd__(self, other):
+ return UserList.__radd__(self, CLVar(other))
+ def __coerce__(self, other):
+ return (self, CLVar(other))
+ def __str__(self):
+ return string.join(self.data)
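+
+# Illustrative sketch (not part of the SCons sources): strings are split on
+# whitespace both when a CLVar is built and when more flags are added:
+#
+#     >>> cv = CLVar('-O2 -g')
+#     >>> (cv + '-Wall').data
+#     ['-O2', '-g', '-Wall']
+#     >>> str(cv)
+#     '-O2 -g'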
+
+# A dictionary that preserves the order in which items are added.
+# Submitted by David Benjamin to ActiveState's Python Cookbook web site:
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/107747
+# Including fixes/enhancements from the follow-on discussions.
+class OrderedDict(UserDict):
+ def __init__(self, dict = None):
+ self._keys = []
+ UserDict.__init__(self, dict)
+
+ def __delitem__(self, key):
+ UserDict.__delitem__(self, key)
+ self._keys.remove(key)
+
+ def __setitem__(self, key, item):
+ UserDict.__setitem__(self, key, item)
+ if key not in self._keys: self._keys.append(key)
+
+ def clear(self):
+ UserDict.clear(self)
+ self._keys = []
+
+ def copy(self):
+ dict = OrderedDict()
+ dict.update(self)
+ return dict
+
+ def items(self):
+ return zip(self._keys, self.values())
+
+ def keys(self):
+ return self._keys[:]
+
+ def popitem(self):
+ try:
+ key = self._keys[-1]
+ except IndexError:
+ raise KeyError('dictionary is empty')
+
+ val = self[key]
+ del self[key]
+
+ return (key, val)
+
+ def setdefault(self, key, failobj = None):
+ UserDict.setdefault(self, key, failobj)
+ if key not in self._keys: self._keys.append(key)
+
+ def update(self, dict):
+ for (key, val) in dict.items():
+ self.__setitem__(key, val)
+
+ def values(self):
+ return map(self.get, self._keys)
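+
+# Illustrative sketch (not part of the SCons sources): keys() and items()
+# come back in insertion order rather than in hash order:
+#
+#     >>> od = OrderedDict()
+#     >>> od['b'] = 1
+#     >>> od['a'] = 2
+#     >>> od.keys()
+#     ['b', 'a']
+#     >>> od.items()
+#     [('b', 1), ('a', 2)]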
+
+class Selector(OrderedDict):
+ """A callable ordered dictionary that maps file suffixes to
+ dictionary values. We preserve the order in which items are added
+ so that get_suffix() calls always return the first suffix added."""
+ def __call__(self, env, source):
+ try:
+ ext = source[0].suffix
+ except IndexError:
+ ext = ""
+ try:
+ return self[ext]
+ except KeyError:
+ # Try to perform Environment substitution on the keys of
+ # the dictionary before giving up.
+ s_dict = {}
+ for (k,v) in self.items():
+ if not k is None:
+ s_k = env.subst(k)
+ if s_dict.has_key(s_k):
+ # We only raise an error when variables point
+ # to the same suffix. If one suffix is literal
+ # and a variable suffix contains this literal,
+ # the literal wins and we don't raise an error.
+ raise KeyError, (s_dict[s_k][0], k, s_k)
+ s_dict[s_k] = (k,v)
+ try:
+ return s_dict[ext][1]
+ except KeyError:
+ try:
+ return self[None]
+ except KeyError:
+ return None
+
+
+if sys.platform == 'cygwin':
+ # On Cygwin, os.path.normcase() lies, so just report back the
+ # fact that the underlying Windows OS is case-insensitive.
+ def case_sensitive_suffixes(s1, s2):
+ return 0
+else:
+ def case_sensitive_suffixes(s1, s2):
+ return (os.path.normcase(s1) != os.path.normcase(s2))
+
+def adjustixes(fname, pre, suf, ensure_suffix=False):
+ if pre:
+ path, fn = os.path.split(os.path.normpath(fname))
+ if fn[:len(pre)] != pre:
+ fname = os.path.join(path, pre + fn)
+ # Only append a suffix if the suffix we're going to add isn't already
+ # there, and if either we've been asked to ensure the specific suffix
+ # is present or there's no suffix on it at all.
+ if suf and fname[-len(suf):] != suf and \
+ (ensure_suffix or not splitext(fname)[1]):
+ fname = fname + suf
+ return fname
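+
+# Illustrative sketch (not part of the SCons sources): the prefix is added
+# only when missing, and the suffix only when the name has no suffix yet
+# (or when ensure_suffix is set):
+#
+#     >>> adjustixes('foo', 'lib', '.a')
+#     'libfoo.a'
+#     >>> adjustixes('libfoo.so', 'lib', '.a')
+#     'libfoo.so'
+#     >>> adjustixes('libfoo.so', 'lib', '.a', ensure_suffix=True)
+#     'libfoo.so.a'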
+
+
+
+# From Tim Peters,
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
+# ASPN: Python Cookbook: Remove duplicates from a sequence
+# (Also in the printed Python Cookbook.)
+
+def unique(s):
+ """Return a list of the elements in s, but without duplicates.
+
+ For example, unique([1,2,3,1,2,3]) is some permutation of [1,2,3],
+ unique("abcabc") some permutation of ["a", "b", "c"], and
+ unique(([1, 2], [2, 3], [1, 2])) some permutation of
+ [[2, 3], [1, 2]].
+
+ For best speed, all sequence elements should be hashable. Then
+ unique() will usually work in linear time.
+
+ If not possible, the sequence elements should enjoy a total
+ ordering, and if list(s).sort() doesn't raise TypeError it's
+ assumed that they do enjoy a total ordering. Then unique() will
+ usually work in O(N*log2(N)) time.
+
+ If that's not possible either, the sequence elements must support
+ equality-testing. Then unique() will usually work in quadratic
+ time.
+ """
+
+ n = len(s)
+ if n == 0:
+ return []
+
+ # Try using a dict first, as that's the fastest and will usually
+ # work. If it doesn't work, it will usually fail quickly, so it
+ # usually doesn't cost much to *try* it. It requires that all the
+ # sequence elements be hashable, and support equality comparison.
+ u = {}
+ try:
+ for x in s:
+ u[x] = 1
+ except TypeError:
+ pass # move on to the next method
+ else:
+ return u.keys()
+ del u
+
+ # We can't hash all the elements. Second fastest is to sort,
+ # which brings the equal elements together; then duplicates are
+ # easy to weed out in a single pass.
+ # NOTE: Python's list.sort() was designed to be efficient in the
+ # presence of many duplicate elements. This isn't true of all
+ # sort functions in all languages or libraries, so this approach
+ # is more effective in Python than it may be elsewhere.
+ try:
+ t = list(s)
+ t.sort()
+ except TypeError:
+ pass # move on to the next method
+ else:
+ assert n > 0
+ last = t[0]
+ lasti = i = 1
+ while i < n:
+ if t[i] != last:
+ t[lasti] = last = t[i]
+ lasti = lasti + 1
+ i = i + 1
+ return t[:lasti]
+ del t
+
+ # Brute force is all that's left.
+ u = []
+ for x in s:
+ if x not in u:
+ u.append(x)
+ return u
+
+
+
+# From Alex Martelli,
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
+# ASPN: Python Cookbook: Remove duplicates from a sequence
+# First comment, dated 2001/10/13.
+# (Also in the printed Python Cookbook.)
+
+def uniquer(seq, idfun=None):
+ if idfun is None:
+ def idfun(x): return x
+ seen = {}
+ result = []
+ for item in seq:
+ marker = idfun(item)
+ # in old Python versions:
+ # if seen.has_key(marker)
+ # but in new ones:
+ if marker in seen: continue
+ seen[marker] = 1
+ result.append(item)
+ return result
+
+# A more efficient implementation of Alex's uniquer(), this avoids the
+# idfun() argument and function-call overhead by assuming that all
+# items in the sequence are hashable.
+
+def uniquer_hashables(seq):
+ seen = {}
+ result = []
+ for item in seq:
+ #if not item in seen:
+ if not seen.has_key(item):
+ seen[item] = 1
+ result.append(item)
+ return result
+
+
+
+# Much of the logic here was originally based on recipe 4.9 from the
+# Python Cookbook, but we had to dumb it way down for Python 1.5.2.
+class LogicalLines:
+
+ def __init__(self, fileobj):
+ self.fileobj = fileobj
+
+ def readline(self):
+ result = []
+ while 1:
+ line = self.fileobj.readline()
+ if not line:
+ break
+ if line[-2:] == '\\\n':
+ result.append(line[:-2])
+ else:
+ result.append(line)
+ break
+ return string.join(result, '')
+
+ def readlines(self):
+ result = []
+ while 1:
+ line = self.readline()
+ if not line:
+ break
+ result.append(line)
+ return result
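+
+# Illustrative sketch (not part of the SCons sources): backslash-continued
+# physical lines are glued together into single logical lines:
+#
+#     >>> import StringIO
+#     >>> f = StringIO.StringIO('ab \\\ncd\nef\n')
+#     >>> LogicalLines(f).readlines()
+#     ['ab cd\n', 'ef\n']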
+
+
+
+class UniqueList(UserList):
+ def __init__(self, seq = []):
+ UserList.__init__(self, seq)
+ self.unique = True
+ def __make_unique(self):
+ if not self.unique:
+ self.data = uniquer_hashables(self.data)
+ self.unique = True
+ def __lt__(self, other):
+ self.__make_unique()
+ return UserList.__lt__(self, other)
+ def __le__(self, other):
+ self.__make_unique()
+ return UserList.__le__(self, other)
+ def __eq__(self, other):
+ self.__make_unique()
+ return UserList.__eq__(self, other)
+ def __ne__(self, other):
+ self.__make_unique()
+ return UserList.__ne__(self, other)
+ def __gt__(self, other):
+ self.__make_unique()
+ return UserList.__gt__(self, other)
+ def __ge__(self, other):
+ self.__make_unique()
+ return UserList.__ge__(self, other)
+ def __cmp__(self, other):
+ self.__make_unique()
+ return UserList.__cmp__(self, other)
+ def __len__(self):
+ self.__make_unique()
+ return UserList.__len__(self)
+ def __getitem__(self, i):
+ self.__make_unique()
+ return UserList.__getitem__(self, i)
+ def __setitem__(self, i, item):
+ UserList.__setitem__(self, i, item)
+ self.unique = False
+ def __getslice__(self, i, j):
+ self.__make_unique()
+ return UserList.__getslice__(self, i, j)
+ def __setslice__(self, i, j, other):
+ UserList.__setslice__(self, i, j, other)
+ self.unique = False
+ def __add__(self, other):
+ result = UserList.__add__(self, other)
+ result.unique = False
+ return result
+ def __radd__(self, other):
+ result = UserList.__radd__(self, other)
+ result.unique = False
+ return result
+ def __iadd__(self, other):
+ result = UserList.__iadd__(self, other)
+ result.unique = False
+ return result
+ def __mul__(self, other):
+ result = UserList.__mul__(self, other)
+ result.unique = False
+ return result
+ def __rmul__(self, other):
+ result = UserList.__rmul__(self, other)
+ result.unique = False
+ return result
+ def __imul__(self, other):
+ result = UserList.__imul__(self, other)
+ result.unique = False
+ return result
+ def append(self, item):
+ UserList.append(self, item)
+ self.unique = False
+    def insert(self, i, item):
+        UserList.insert(self, i, item)
+        self.unique = False
+ def count(self, item):
+ self.__make_unique()
+ return UserList.count(self, item)
+ def index(self, item):
+ self.__make_unique()
+ return UserList.index(self, item)
+ def reverse(self):
+ self.__make_unique()
+ UserList.reverse(self)
+ def sort(self, *args, **kwds):
+ self.__make_unique()
+ #return UserList.sort(self, *args, **kwds)
+ return apply(UserList.sort, (self,)+args, kwds)
+ def extend(self, other):
+ UserList.extend(self, other)
+ self.unique = False
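+
+# Illustrative sketch (not part of the SCons sources): duplicates are
+# accepted on the way in and only weeded out lazily, the first time the
+# list is actually read:
+#
+#     >>> ul = UniqueList(['a', 'b'])
+#     >>> ul.append('a')
+#     >>> len(ul), ul[:]
+#     (2, ['a', 'b'])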
+
+
+
+class Unbuffered:
+ """
+ A proxy class that wraps a file object, flushing after every write,
+ and delegating everything else to the wrapped object.
+ """
+ def __init__(self, file):
+ self.file = file
+ def write(self, arg):
+ try:
+ self.file.write(arg)
+ self.file.flush()
+ except IOError:
+            # Stdout might be connected to a pipe that has been closed
+            # by now. The most likely reason for the pipe being closed
+            # is that the user has pressed ctrl-c. If this is the case,
+            # then SCons is currently shutting down. We therefore ignore
+            # IOErrors here so that SCons can continue and shut down
+            # properly so that the .sconsign is correctly written
+            # before SCons exits.
+ pass
+ def __getattr__(self, attr):
+ return getattr(self.file, attr)
+
+def make_path_relative(path):
+    """ Converts an absolute path name to a relative path name.
+ """
+ if os.path.isabs(path):
+ drive_s,path = os.path.splitdrive(path)
+
+ import re
+ if not drive_s:
+ path=re.compile("/*(.*)").findall(path)[0]
+ else:
+ path=path[1:]
+
+ assert( not os.path.isabs( path ) ), path
+ return path
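+
+# Illustrative sketch (not part of the SCons sources), on a POSIX-style path:
+#
+#     >>> make_path_relative('/usr/lib/python')
+#     'usr/lib/python'
+#     >>> make_path_relative('already/relative')
+#     'already/relative'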
+
+
+
+# The original idea for AddMethod() and RenameFunction() come from the
+# following post to the ActiveState Python Cookbook:
+#
+# ASPN: Python Cookbook : Install bound methods in an instance
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/223613
+#
+# That code was a little fragile, though, so the following changes
+# have been wrung on it:
+#
+# * Switched the installmethod() "object" and "function" arguments,
+# so the order reflects that the left-hand side is the thing being
+# "assigned to" and the right-hand side is the value being assigned.
+#
+# * Changed explicit type-checking to the "try: klass = object.__class__"
+# block in installmethod() below so that it still works with the
+# old-style classes that SCons uses.
+#
+# * Replaced the by-hand creation of methods and functions with use of
+# the "new" module, as alluded to in Alex Martelli's response to the
+# following Cookbook post:
+#
+# ASPN: Python Cookbook : Dynamically added methods to a class
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/81732
+
+def AddMethod(object, function, name = None):
+ """
+ Adds either a bound method to an instance or an unbound method to
+    a class. If name is omitted the name of the specified function
+    is used by default.
+    Example:
+      a = A()
+      def f(self, x, y):
+          self.z = x + y
+      AddMethod(A, f, "add")
+      a.add(2, 4)
+      print a.z
+      AddMethod(a, lambda self, i: self.l[i], "listIndex")
+      print a.listIndex(5)
+ """
+ import new
+
+ if name is None:
+ name = function.func_name
+ else:
+ function = RenameFunction(function, name)
+
+ try:
+ klass = object.__class__
+ except AttributeError:
+ # "object" is really a class, so it gets an unbound method.
+ object.__dict__[name] = new.instancemethod(function, None, object)
+ else:
+ # "object" is really an instance, so it gets a bound method.
+ object.__dict__[name] = new.instancemethod(function, object, klass)
+
+def RenameFunction(function, name):
+ """
+ Returns a function identical to the specified function, but with
+ the specified name.
+ """
+ import new
+
+ # Compatibility for Python 1.5 and 2.1. Can be removed in favor of
+ # passing function.func_defaults directly to new.function() once
+ # we base on Python 2.2 or later.
+ func_defaults = function.func_defaults
+ if func_defaults is None:
+ func_defaults = ()
+
+ return new.function(function.func_code,
+ function.func_globals,
+ name,
+ func_defaults)
+
+
+md5 = False
+def MD5signature(s):
+ return str(s)
+
+def MD5filesignature(fname, chunksize=65536):
+ f = open(fname, "rb")
+ result = f.read()
+ f.close()
+ return result
+
+try:
+ import hashlib
+except ImportError:
+ pass
+else:
+ if hasattr(hashlib, 'md5'):
+ md5 = True
+ def MD5signature(s):
+ m = hashlib.md5()
+ m.update(str(s))
+ return m.hexdigest()
+
+ def MD5filesignature(fname, chunksize=65536):
+ m = hashlib.md5()
+ f = open(fname, "rb")
+ while 1:
+ blck = f.read(chunksize)
+ if not blck:
+ break
+ m.update(str(blck))
+ f.close()
+ return m.hexdigest()
+
+def MD5collect(signatures):
+ """
+ Collects a list of signatures into an aggregate signature.
+
+ signatures - a list of signatures
+ returns - the aggregate signature
+ """
+ if len(signatures) == 1:
+ return signatures[0]
+ else:
+ return MD5signature(string.join(signatures, ', '))
+
+
+
+# From Dinu C. Gherman,
+# Python Cookbook, second edition, recipe 6.17, p. 277.
+# Also:
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205
+# ASPN: Python Cookbook: Null Object Design Pattern
+
+class Null:
+    """ Null objects always and reliably "do nothing." """
+
+ def __new__(cls, *args, **kwargs):
+ if not '_inst' in vars(cls):
+ #cls._inst = type.__new__(cls, *args, **kwargs)
+ cls._inst = apply(type.__new__, (cls,) + args, kwargs)
+ return cls._inst
+ def __init__(self, *args, **kwargs):
+ pass
+ def __call__(self, *args, **kwargs):
+ return self
+ def __repr__(self):
+ return "Null()"
+ def __nonzero__(self):
+ return False
+ def __getattr__(self, mname):
+ return self
+ def __setattr__(self, name, value):
+ return self
+ def __delattr__(self, name):
+ return self
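+
+# Illustrative sketch (not part of the SCons sources): attribute access and
+# calls on a Null object just yield a Null again, and attribute assignment
+# is silently ignored, so it can stand in anywhere without raising:
+#
+#     >>> n = Null()
+#     >>> n.anything.at.all('ignored')
+#     Null()
+#     >>> bool(n)
+#     False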
+
+
+
+del __revision__
diff --git a/tools/scons/scons-local-1.2.0/SCons/Variables/BoolVariable.py b/tools/scons/scons-local-1.2.0/SCons/Variables/BoolVariable.py
new file mode 100644
index 000000000..3aaf694c4
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Variables/BoolVariable.py
@@ -0,0 +1,85 @@
+"""engine.SCons.Variables.BoolVariable
+
+This file defines the option type for SCons implementing true/false values.
+
+Usage example:
+
+ opts = Variables()
+ opts.Add(BoolVariable('embedded', 'build for an embedded system', 0))
+ ...
+ if env['embedded'] == 1:
+ ...
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Variables/BoolVariable.py 3842 2008/12/20 22:59:52 scons"
+
+__all__ = ['BoolVariable',]
+
+import string
+
+import SCons.Errors
+
+__true_strings = ('y', 'yes', 'true', 't', '1', 'on' , 'all' )
+__false_strings = ('n', 'no', 'false', 'f', '0', 'off', 'none')
+
+
+def _text2bool(val):
+ """
+ Converts strings to True/False depending on the 'truth' expressed by
+    the string. If the string can't be converted, a ValueError is
+    raised.
+
+    See '__true_strings' and '__false_strings' for values considered
+    'true' or 'false' respectively.
+
+ This is usable as 'converter' for SCons' Variables.
+ """
+ lval = string.lower(val)
+ if lval in __true_strings: return True
+ if lval in __false_strings: return False
+ raise ValueError("Invalid value for boolean option: %s" % val)
+
+
+def _validator(key, val, env):
+ """
+    Validates the given value to be either true or false.
+
+ This is usable as 'validator' for SCons' Variables.
+ """
+ if not env[key] in (True, False):
+ raise SCons.Errors.UserError(
+ 'Invalid value for boolean option %s: %s' % (key, env[key]))
+
+
+def BoolVariable(key, help, default):
+ """
+    The input parameters describe a boolean option, thus they are
+    returned with the correct converter and validator appended. The
+    'help' text will be appended with '(yes|no)' to show the valid
+    values. The result is usable for input to opts.Add().
+ """
+ return (key, '%s (yes|no)' % help, default,
+ _validator, _text2bool)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Variables/EnumVariable.py b/tools/scons/scons-local-1.2.0/SCons/Variables/EnumVariable.py
new file mode 100644
index 000000000..6d2252ddf
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Variables/EnumVariable.py
@@ -0,0 +1,101 @@
+"""engine.SCons.Variables.EnumVariable
+
+This file defines the option type for SCons allowing only specified
+input-values.
+
+Usage example:
+
+ opts = Variables()
+ opts.Add(EnumVariable('debug', 'debug output and symbols', 'no',
+ allowed_values=('yes', 'no', 'full'),
+ map={}, ignorecase=2))
+ ...
+ if env['debug'] == 'full':
+ ...
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Variables/EnumVariable.py 3842 2008/12/20 22:59:52 scons"
+
+__all__ = ['EnumVariable',]
+
+import string
+
+import SCons.Errors
+
+def _validator(key, val, env, vals):
+ if not val in vals:
+ raise SCons.Errors.UserError(
+ 'Invalid value for option %s: %s' % (key, val))
+
+
+def EnumVariable(key, help, default, allowed_values, map={}, ignorecase=0):
+ """
+    The input parameters describe an option with only certain values
+ allowed. They are returned with an appropriate converter and
+ validator appended. The result is usable for input to
+ Variables.Add().
+
+ 'key' and 'default' are the values to be passed on to Variables.Add().
+
+    'help' will be appended with the allowed values automatically.
+
+ 'allowed_values' is a list of strings, which are allowed as values
+ for this option.
+
+ The 'map'-dictionary may be used for converting the input value
+    into canonical values (e.g. for aliases).
+
+ 'ignorecase' defines the behaviour of the validator:
+
+ If ignorecase == 0, the validator/converter are case-sensitive.
+ If ignorecase == 1, the validator/converter are case-insensitive.
+ If ignorecase == 2, the validator/converter is case-insensitive and
+ the converted value will always be lower-case.
+
+ The 'validator' tests whether the value is in the list of allowed
+ values. The 'converter' converts input values according to the
+ given 'map'-dictionary (unmapped input values are returned
+ unchanged).
+ """
+ help = '%s (%s)' % (help, string.join(allowed_values, '|'))
+ # define validator
+ if ignorecase >= 1:
+ validator = lambda key, val, env, vals=allowed_values: \
+ _validator(key, string.lower(val), env, vals)
+ else:
+ validator = lambda key, val, env, vals=allowed_values: \
+ _validator(key, val, env, vals)
+ # define converter
+ if ignorecase == 2:
+ converter = lambda val, map=map: \
+ string.lower(map.get(string.lower(val), val))
+ elif ignorecase == 1:
+ converter = lambda val, map=map: \
+ map.get(string.lower(val), val)
+ else:
+ converter = lambda val, map=map: \
+ map.get(val, val)
+ return (key, help, default, validator, converter)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Variables/ListVariable.py b/tools/scons/scons-local-1.2.0/SCons/Variables/ListVariable.py
new file mode 100644
index 000000000..72e6fec4e
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Variables/ListVariable.py
@@ -0,0 +1,133 @@
+"""engine.SCons.Variables.ListVariable
+
+This file defines the option type for SCons implementing 'lists'.
+
+A 'list' option may either be 'all', 'none' or a list of names
+separated by commas. After the option has been processed, the option
+value holds either the named list elements, all list elements or no
+list elements at all.
+
+Usage example:
+
+ list_of_libs = Split('x11 gl qt ical')
+
+ opts = Variables()
+ opts.Add(ListVariable('shared',
+ 'libraries to build as shared libraries',
+ 'all',
+ elems = list_of_libs))
+ ...
+ for lib in list_of_libs:
+ if lib in env['shared']:
+ env.SharedObject(...)
+ else:
+ env.Object(...)
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Variables/ListVariable.py 3842 2008/12/20 22:59:52 scons"
+
+# Known Bug: This should behave like a Set-Type, but does not really,
+# since elements can occur twice.
+
+__all__ = ['ListVariable',]
+
+import string
+import UserList
+
+import SCons.Util
+
+
+class _ListVariable(UserList.UserList):
+ def __init__(self, initlist=[], allowedElems=[]):
+ UserList.UserList.__init__(self, filter(None, initlist))
+ self.allowedElems = allowedElems[:]
+ self.allowedElems.sort()
+
+ def __cmp__(self, other):
+ raise NotImplementedError
+ def __eq__(self, other):
+ raise NotImplementedError
+ def __ge__(self, other):
+ raise NotImplementedError
+ def __gt__(self, other):
+ raise NotImplementedError
+ def __le__(self, other):
+ raise NotImplementedError
+ def __lt__(self, other):
+ raise NotImplementedError
+ def __str__(self):
+ if len(self) == 0:
+ return 'none'
+ self.data.sort()
+ if self.data == self.allowedElems:
+ return 'all'
+ else:
+ return string.join(self, ',')
+ def prepare_to_store(self):
+ return self.__str__()
+
+def _converter(val, allowedElems, mapdict):
+    """
+    Convert 'all', 'none' or a comma-separated string of names into a
+    _ListVariable restricted to allowedElems (applying mapdict aliases).
+    """
+ if val == 'none':
+ val = []
+ elif val == 'all':
+ val = allowedElems
+ else:
+ val = filter(None, string.split(val, ','))
+ val = map(lambda v, m=mapdict: m.get(v, v), val)
+ notAllowed = filter(lambda v, aE=allowedElems: not v in aE, val)
+ if notAllowed:
+ raise ValueError("Invalid value(s) for option: %s" %
+ string.join(notAllowed, ','))
+ return _ListVariable(val, allowedElems)
+
+
+## def _validator(key, val, env):
+## """
+## """
+##     # todo: write validator for pkg list
+## return 1
+
+
+def ListVariable(key, help, default, names, map={}):
+ """
+    The input parameters describe a 'package list' option, thus they
+    are returned with the correct converter and validator appended. The
+    result is usable for input to opts.Add().
+
+    A 'package list' option may either be 'all', 'none' or a list of
+    package names (separated by commas).
+ """
+ names_str = 'allowed names: %s' % string.join(names, ' ')
+ if SCons.Util.is_List(default):
+ default = string.join(default, ',')
+ help = string.join(
+ (help, '(all|none|comma-separated list of names)', names_str),
+ '\n ')
+ return (key, help, default,
+ None, #_validator,
+ lambda val, elems=names, m=map: _converter(val, elems, m))
diff --git a/tools/scons/scons-local-1.2.0/SCons/Variables/PackageVariable.py b/tools/scons/scons-local-1.2.0/SCons/Variables/PackageVariable.py
new file mode 100644
index 000000000..5823bf568
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Variables/PackageVariable.py
@@ -0,0 +1,103 @@
+"""engine.SCons.Variables.PackageVariable
+
+This file defines the option type for SCons implementing 'package
+activation'.
+
+To be used whenever a 'package' may be enabled/disabled and the
+package path may be specified.
+
+Usage example:
+
+  Examples:
+    x11=no  (disables X11 support)
+    x11=yes (will search for the package installation dir)
+    x11=/usr/local/X11 (will check this path for existence)
+
+ To replace autoconf's --with-xxx=yyy
+
+  opts = Variables()
+  opts.Add(PackageVariable('x11',
+                           'use X11 installed here (yes = search some places)',
+                           'yes'))
+ ...
+ if env['x11'] == True:
+ dir = ... search X11 in some standard places ...
+ env['x11'] = dir
+ if env['x11']:
+ ... build with x11 ...
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Variables/PackageVariable.py 3842 2008/12/20 22:59:52 scons"
+
+__all__ = ['PackageVariable',]
+
+import string
+
+import SCons.Errors
+
+__enable_strings = ('1', 'yes', 'true', 'on', 'enable', 'search')
+__disable_strings = ('0', 'no', 'false', 'off', 'disable')
+
+def _converter(val):
+    """
+    Convert a package option value into True, False, or the path string
+    given by the user.
+    """
+ lval = string.lower(val)
+ if lval in __enable_strings: return True
+ if lval in __disable_strings: return False
+ #raise ValueError("Invalid value for boolean option: %s" % val)
+ return val
+
+
+def _validator(key, val, env, searchfunc):
+    # NB: searchfunc is currently undocumented and unsupported
+ """
+ """
+ # todo: write validator, check for path
+ import os
+ if env[key] is True:
+ if searchfunc:
+ env[key] = searchfunc(key, val)
+ elif env[key] and not os.path.exists(val):
+ raise SCons.Errors.UserError(
+ 'Path does not exist for option %s: %s' % (key, val))
+
+
+def PackageVariable(key, help, default, searchfunc=None):
+    # NB: searchfunc is currently undocumented and unsupported
+ """
+    The input parameters describe a 'package activation' option, thus
+    they are returned with the correct converter and validator appended.
+    The result is usable for input to opts.Add().
+
+    A 'package activation' option may either be enabled (a true-like
+    string), disabled (a false-like string), or an explicit path to the
+    package installation.
+ """
+ help = string.join(
+ (help, '( yes | no | /path/to/%s )' % key),
+ '\n ')
+ return (key, help, default,
+ lambda k, v, e, f=searchfunc: _validator(k,v,e,f),
+ _converter)
diff --git a/tools/scons/scons-local-1.2.0/SCons/Variables/PathVariable.py b/tools/scons/scons-local-1.2.0/SCons/Variables/PathVariable.py
new file mode 100644
index 000000000..752e34790
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Variables/PathVariable.py
@@ -0,0 +1,141 @@
+"""SCons.Variables.PathVariable
+
+This file defines an option type for SCons implementing path settings.
+
+To be used whenever a user-specified path override should be allowed.
+
+Arguments to PathVariable are:
+ option-name = name of this option on the command line (e.g. "prefix")
+ option-help = help string for option
+ option-dflt = default value for this option
+ validator = [optional] validator for option value. Predefined
+ validators are:
+
+ PathAccept -- accepts any path setting; no validation
+ PathIsDir -- path must be an existing directory
+ PathIsDirCreate -- path must be a dir; will create
+ PathIsFile -- path must be a file
+ PathExists -- path must exist (any type) [default]
+
+ The validator is a function that is called and which
+ should return True or False to indicate if the path
+ is valid. The arguments to the validator function
+ are: (key, val, env). The key is the name of the
+ option, the val is the path specified for the option,
+                 and the env is the env to which the Options have been
+ added.
+
+Usage example:
+
+ Examples:
+ prefix=/usr/local
+
+ opts = Variables()
+ opts.Add(PathVariable('qtdir',
+ 'where the root of Qt is installed',
+ qtdir, PathIsDir))
+ opts.Add(PathVariable('qt_includes',
+ 'where the Qt includes are installed',
+ '$qtdir/includes', PathIsDirCreate))
+ opts.Add(PathVariable('qt_libraries',
+ 'where the Qt library is installed',
+ '$qtdir/lib'))
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Variables/PathVariable.py 3842 2008/12/20 22:59:52 scons"
+
+__all__ = ['PathVariable',]
+
+import os
+import os.path
+
+import SCons.Errors
+import SCons.Util
+
+class _PathVariableClass:
+
+ def PathAccept(self, key, val, env):
+ """Accepts any path, no checking done."""
+ pass
+
+ def PathIsDir(self, key, val, env):
+ """Validator to check if Path is a directory."""
+ if not os.path.isdir(val):
+ if os.path.isfile(val):
+ m = 'Directory path for option %s is a file: %s'
+ else:
+ m = 'Directory path for option %s does not exist: %s'
+ raise SCons.Errors.UserError(m % (key, val))
+
+ def PathIsDirCreate(self, key, val, env):
+ """Validator to check if Path is a directory,
+ creating it if it does not exist."""
+ if os.path.isfile(val):
+ m = 'Path for option %s is a file, not a directory: %s'
+ raise SCons.Errors.UserError(m % (key, val))
+ if not os.path.isdir(val):
+ os.makedirs(val)
+
+ def PathIsFile(self, key, val, env):
+ """validator to check if Path is a file"""
+ if not os.path.isfile(val):
+ if os.path.isdir(val):
+ m = 'File path for option %s is a directory: %s'
+ else:
+ m = 'File path for option %s does not exist: %s'
+ raise SCons.Errors.UserError(m % (key, val))
+
+ def PathExists(self, key, val, env):
+ """validator to check if Path exists"""
+ if not os.path.exists(val):
+ m = 'Path for option %s does not exist: %s'
+ raise SCons.Errors.UserError(m % (key, val))
+
+ def __call__(self, key, help, default, validator=None):
+        # NB: searchfunc is currently undocumented and unsupported
+ """
+        The input parameters describe a 'path' option, thus they
+        are returned with the correct converter and validator appended. The
+        result is usable for input to opts.Add().
+
+        The 'default' option specifies the default path to use if the
+        user does not specify an override with this option.
+
+        'validator' is a validator function; see this file for examples.
+ """
+ if validator is None:
+ validator = self.PathExists
+
+ if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
+ return (key, '%s ( /path/to/%s )' % (help, key[0]), default,
+ validator, None)
+ else:
+ return (key, '%s ( /path/to/%s )' % (help, key), default,
+ validator, None)
+
+PathVariable = _PathVariableClass()
diff --git a/tools/scons/scons-local-1.2.0/SCons/Variables/__init__.py b/tools/scons/scons-local-1.2.0/SCons/Variables/__init__.py
new file mode 100644
index 000000000..5d73e10dd
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Variables/__init__.py
@@ -0,0 +1,304 @@
+"""engine.SCons.Variables
+
+This file defines the Variables class that is used to add user-friendly
+customizable variables to an SCons build.
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/Variables/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+import os.path
+import string
+import sys
+
+import SCons.Environment
+import SCons.Errors
+import SCons.Util
+import SCons.Warnings
+
+from BoolVariable import BoolVariable # okay
+from EnumVariable import EnumVariable # okay
+from ListVariable import ListVariable # naja
+from PackageVariable import PackageVariable # naja
+from PathVariable import PathVariable # okay
+
+
+class Variables:
+    """
+    Holds all the options, updates the environment with the variables,
+    and renders the help text.
+    """
+    instance=None
+
+ def __init__(self, files=[], args={}, is_global=1):
+ """
+ files - [optional] List of option configuration files to load
+ (backward compatibility) If a single string is passed it is
+ automatically placed in a file list
+ """
+ self.options = []
+ self.args = args
+ if not SCons.Util.is_List(files):
+ if files:
+ files = [ files ]
+ else:
+ files = []
+ self.files = files
+ self.unknown = {}
+
+ # register the first global instance as the singleton
+ if is_global and not Variables.instance:
+ Variables.instance = self
+
+ def _do_add(self, key, help="", default=None, validator=None, converter=None):
+ class Variable:
+ pass
+
+ option = Variable()
+
+ # if we get a list or a tuple, we take the first element as the
+ # option key and store the remaining in aliases.
+ if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
+ option.key = key[0]
+ option.aliases = key[1:]
+ else:
+ option.key = key
+ option.aliases = [ key ]
+ option.help = help
+ option.default = default
+ option.validator = validator
+ option.converter = converter
+
+ self.options.append(option)
+
+ def keys(self):
+ """
+ Returns the keywords for the options
+ """
+ return map(lambda o: o.key, self.options)
+
+ def Add(self, key, help="", default=None, validator=None, converter=None, **kw):
+ """
+ Add an option.
+
+ key - the name of the variable, or a list or tuple of arguments
+ help - optional help text for the option
+ default - optional default value
+ validator - optional function that is called to validate the option's value
+ Called with (key, value, environment)
+ converter - optional function that is called to convert the option's value before
+ putting it in the environment.
+ """
+
+ if SCons.Util.is_List(key) or type(key) == type(()):
+ apply(self._do_add, key)
+ return
+
+ if not SCons.Util.is_String(key) or \
+ not SCons.Environment.is_valid_construction_var(key):
+ raise SCons.Errors.UserError, "Illegal Variables.Add() key `%s'" % str(key)
+
+ self._do_add(key, help, default, validator, converter)
+
+ def AddVariables(self, *optlist):
+ """
+ Add a list of options.
+
+ Each list element is a tuple/list of arguments to be passed on
+ to the underlying method for adding options.
+
+ Example:
+ opt.AddVariables(
+ ('debug', '', 0),
+ ('CC', 'The C compiler'),
+ ('VALIDATE', 'An option for testing validation', 'notset',
+ validator, None),
+ )
+ """
+ for o in optlist:
+ apply(self._do_add, o)
+
+
+ def Update(self, env, args=None):
+ """
+ Update an environment with the option variables.
+
+ env - the environment to update.
+ """
+
+ values = {}
+
+ # first set the defaults:
+ for option in self.options:
+ if not option.default is None:
+ values[option.key] = option.default
+
+ # next set the value specified in the options file
+ for filename in self.files:
+ if os.path.exists(filename):
+ dir = os.path.split(os.path.abspath(filename))[0]
+ if dir:
+ sys.path.insert(0, dir)
+ try:
+ values['__name__'] = filename
+ execfile(filename, {}, values)
+ finally:
+ if dir:
+ del sys.path[0]
+ del values['__name__']
+
+ # set the values specified on the command line
+ if args is None:
+ args = self.args
+
+ for arg, value in args.items():
+ added = False
+ for option in self.options:
+ if arg in option.aliases + [ option.key ]:
+ values[option.key] = value
+ added = True
+ if not added:
+ self.unknown[arg] = value
+
+ # put the variables in the environment:
+ # (don't copy over variables that are not declared as options)
+ for option in self.options:
+ try:
+ env[option.key] = values[option.key]
+ except KeyError:
+ pass
+
+ # Call the convert functions:
+ for option in self.options:
+ if option.converter and values.has_key(option.key):
+ value = env.subst('${%s}'%option.key)
+ try:
+ try:
+ env[option.key] = option.converter(value)
+ except TypeError:
+ env[option.key] = option.converter(value, env)
+ except ValueError, x:
+ raise SCons.Errors.UserError, 'Error converting option: %s\n%s'%(option.key, x)
+
+
+ # Finally validate the values:
+ for option in self.options:
+ if option.validator and values.has_key(option.key):
+ option.validator(option.key, env.subst('${%s}'%option.key), env)
+
+ def UnknownVariables(self):
+ """
+ Returns any variables supplied in the argument list that are not
+ among the known, declared options in this object.
+ """
+ return self.unknown
+
+ def Save(self, filename, env):
+ """
+ Saves all the options in the given file. This file can
+ then be used to load the options next run. This can be used
+ to create an option cache file.
+
+ filename - Name of the file to save into
+ env - the environment to get the option values from
+ """
+
+ # Create the file and write out the header
+ try:
+ fh = open(filename, 'w')
+
+ try:
+ # Make an assignment in the file for each option
+ # within the environment that was assigned a value
+ # other than the default.
+ for option in self.options:
+ try:
+ value = env[option.key]
+ try:
+ prepare = value.prepare_to_store
+ except AttributeError:
+ try:
+ eval(repr(value))
+ except KeyboardInterrupt:
+ raise
+ except:
+ # Convert stuff that has a repr() that
+ # cannot be evaluated into a string
+ value = SCons.Util.to_String(value)
+ else:
+ value = prepare()
+
+ defaultVal = env.subst(SCons.Util.to_String(option.default))
+ if option.converter:
+ defaultVal = option.converter(defaultVal)
+
+ if str(env.subst('${%s}' % option.key)) != str(defaultVal):
+ fh.write('%s = %s\n' % (option.key, repr(value)))
+ except KeyError:
+ pass
+ finally:
+ fh.close()
+
+ except IOError, x:
+ raise SCons.Errors.UserError, 'Error writing options to file: %s\n%s' % (filename, x)
+
+ def GenerateHelpText(self, env, sort=None):
+ """
+ Generate the help text for the options.
+
+ env - an environment that is used to get the current values
+ of the options.
+ """
+
+ if sort:
+ options = self.options[:]
+ options.sort(lambda x,y,func=sort: func(x.key,y.key))
+ else:
+ options = self.options
+
+ def format(opt, self=self, env=env):
+ if env.has_key(opt.key):
+ actual = env.subst('${%s}' % opt.key)
+ else:
+ actual = None
+ return self.FormatVariableHelpText(env, opt.key, opt.help, opt.default, actual, opt.aliases)
+ lines = filter(None, map(format, options))
+
+ return string.join(lines, '')
+
+ format = '\n%s: %s\n default: %s\n actual: %s\n'
+ format_ = '\n%s: %s\n default: %s\n actual: %s\n aliases: %s\n'
+
+ def FormatVariableHelpText(self, env, key, help, default, actual, aliases=[]):
+ # Don't display the key name itself as an alias.
+ aliases = filter(lambda a, k=key: a != k, aliases)
+ if len(aliases)==0:
+ return self.format % (key, help, default, actual)
+ else:
+ return self.format_ % (key, help, default, actual, aliases)
+
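A short sketch tying together the Variables methods defined above (Add, Update, UnknownVariables, Save, GenerateHelpText); the file name 'custom.py' and the use of the SConstruct globals ARGUMENTS, Environment and Help are illustrative assumptions:

    vars = Variables('custom.py', ARGUMENTS)
    vars.Add('CC', 'The C compiler', 'gcc')
    vars.Add(BoolVariable('DEBUG', 'build with debugging symbols', 0))
    env = Environment()
    vars.Update(env)                   # defaults, then the file, then args
    unknown = vars.UnknownVariables()  # anything supplied but not declared
    vars.Save('custom.py', env)        # cache settings that differ from defaults
    Help(vars.GenerateHelpText(env))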
diff --git a/tools/scons/scons-local-1.2.0/SCons/Warnings.py b/tools/scons/scons-local-1.2.0/SCons/Warnings.py
new file mode 100644
index 000000000..296d6d3c6
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/Warnings.py
@@ -0,0 +1,193 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+"""SCons.Warnings
+
+This file implements the warnings framework for SCons.
+
+"""
+
+__revision__ = "src/engine/SCons/Warnings.py 3842 2008/12/20 22:59:52 scons"
+
+import string
+import sys
+
+import SCons.Errors
+
+class Warning(SCons.Errors.UserError):
+ pass
+
+
+# NOTE: If you add a new warning class, add it to the man page, too!
+
+class CacheWriteErrorWarning(Warning):
+ pass
+
+class CorruptSConsignWarning(Warning):
+ pass
+
+class DependencyWarning(Warning):
+ pass
+
+class DeprecatedWarning(Warning):
+ pass
+
+class DeprecatedCopyWarning(DeprecatedWarning):
+ pass
+
+class DeprecatedOptionsWarning(DeprecatedWarning):
+ pass
+
+class DeprecatedSourceSignaturesWarning(DeprecatedWarning):
+ pass
+
+class DeprecatedTargetSignaturesWarning(DeprecatedWarning):
+ pass
+
+class DuplicateEnvironmentWarning(Warning):
+ pass
+
+class FutureReservedVariableWarning(Warning):
+ pass
+
+class LinkWarning(Warning):
+ pass
+
+class MisleadingKeywordsWarning(Warning):
+ pass
+
+class MissingSConscriptWarning(Warning):
+ pass
+
+class NoMD5ModuleWarning(Warning):
+ pass
+
+class NoMetaclassSupportWarning(Warning):
+ pass
+
+class NoObjectCountWarning(Warning):
+ pass
+
+class NoParallelSupportWarning(Warning):
+ pass
+
+class PythonVersionWarning(DeprecatedWarning):
+ pass
+
+class ReservedVariableWarning(Warning):
+ pass
+
+class StackSizeWarning(Warning):
+ pass
+
+class FortranCxxMixWarning(LinkWarning):
+ pass
+
+_warningAsException = 0
+
+# Below is a list of 2-tuples. The first element is a class object.
+# The second element is true if that class is enabled, false if it is disabled.
+_enabled = []
+
+_warningOut = None
+
+def suppressWarningClass(clazz):
+ """Suppresses all warnings that are of type clazz or
+ derived from clazz."""
+ _enabled.insert(0, (clazz, 0))
+
+def enableWarningClass(clazz):
+ """Enables all warnings that are of type clazz or
+ derived from clazz."""
+ _enabled.insert(0, (clazz, 1))
+
+def warningAsException(flag=1):
+ """Turn warnings into exceptions. Returns the old value of the flag."""
+ global _warningAsException
+ old = _warningAsException
+ _warningAsException = flag
+ return old
+
+def warn(clazz, *args):
+ global _enabled, _warningAsException, _warningOut
+
+ warning = clazz(args)
+ for clazz, flag in _enabled:
+ if isinstance(warning, clazz):
+ if flag:
+ if _warningAsException:
+ raise warning
+
+ if _warningOut:
+ _warningOut(warning)
+ break
+
+def process_warn_strings(arguments):
+ """Process string specifications of enabling/disabling warnings,
+ as passed to the --warn option or the SetOption('warn') function.
+
+ An argument to this option should be of the form <warning-class>
+ or no-<warning-class>. The warning class is munged in order
+ to get an actual class name from the classes above, which we
+ need to pass to the enableWarningClass()/suppressWarningClass() functions.
+ The supplied <warning-class> is split on hyphens, each element
+ is capitalized, then smushed back together. Then the string
+ "Warning" is appended to get the class name.
+
+ For example, 'deprecated' will enable the DeprecatedWarning
+ class. 'no-dependency' will disable the DependencyWarning
+ class.
+
+ As a special case, --warn=all and --warn=no-all will enable or
+ disable (respectively) the base Warning class of all warnings.
+
+ """
+
+ def _capitalize(s):
+ if s[:5] == "scons":
+ return "SCons" + s[5:]
+ else:
+ return string.capitalize(s)
+
+ for arg in arguments:
+
+ elems = string.split(string.lower(arg), '-')
+ enable = 1
+ if elems[0] == 'no':
+ enable = 0
+ del elems[0]
+
+ if len(elems) == 1 and elems[0] == 'all':
+ class_name = "Warning"
+ else:
+ class_name = string.join(map(_capitalize, elems), '') + "Warning"
+ try:
+ clazz = globals()[class_name]
+ except KeyError:
+ sys.stderr.write("No warning type: '%s'\n" % arg)
+ else:
+ if enable:
+ enableWarningClass(clazz)
+ else:
+ suppressWarningClass(clazz)
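A minimal sketch of driving the framework above, both directly and through the string form handled by process_warn_strings(); the warning message text is an illustrative assumption:

    import sys
    import SCons.Warnings
    # give warnings somewhere to go for the purposes of this sketch
    SCons.Warnings._warningOut = lambda w: sys.stderr.write(str(w) + '\n')
    SCons.Warnings.enableWarningClass(SCons.Warnings.DeprecatedWarning)
    SCons.Warnings.warn(SCons.Warnings.DeprecatedCopyWarning,
                        'env.Copy() is deprecated')
    # equivalent to --warn=no-deprecated on the command line
    SCons.Warnings.process_warn_strings(['no-deprecated'])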
diff --git a/tools/scons/scons-local-1.2.0/SCons/__init__.py b/tools/scons/scons-local-1.2.0/SCons/__init__.py
new file mode 100644
index 000000000..c006699bb
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/__init__.py
@@ -0,0 +1,43 @@
+"""SCons
+
+The main package for the SCons software construction utility.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+__version__ = "1.2.0"
+
+__build__ = "r3842"
+
+__buildsys__ = "scons-dev"
+
+__date__ = "2008/12/20 22:59:52"
+
+__developer__ = "scons"
+
+# make sure compatibility is always in place
+import SCons.compat
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/__init__.py b/tools/scons/scons-local-1.2.0/SCons/compat/__init__.py
new file mode 100644
index 000000000..c285db357
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/__init__.py
@@ -0,0 +1,244 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = """
+SCons compatibility package for old Python versions
+
+This subpackage holds modules that provide backwards-compatible
+implementations of various things that we'd like to use in SCons but which
+only show up in later versions of Python than the early, old version(s)
+we still support.
+
+Other code will not generally reference things in this package through
+the SCons.compat namespace. The modules included here add things to
+the __builtin__ namespace or the global module list so that the rest
+of our code can use the objects and names imported here regardless of
+Python version.
+
+Simply put, things that go into the __builtin__ namespace come from
+our builtins module.
+
+The rest of the things here will be in individual compatibility modules
+that are either: 1) suitably modified copies of the future modules that
+we want to use; or 2) backwards compatible re-implementations of the
+specific portions of a future module's API that we want to use.
+
+GENERAL WARNINGS: Implementations of functions in the SCons.compat
+modules are *NOT* guaranteed to be fully compliant with these functions in
+later versions of Python. We are only concerned with adding functionality
+that we actually use in SCons, so be wary if you lift this code for
+other uses. (That said, making these more nearly the same as later,
+official versions is still a desirable goal; we just don't need to be
+obsessive about it.)
+
+We name the compatibility modules with an initial '_scons_' (for example,
+_scons_subprocess.py is our compatibility module for subprocess) so
+that we can still try to import the real module name and fall back to
+our compatibility module if we get an ImportError. The import_as()
+function defined below loads the module as the "real" name (without the
+'_scons'), after which all of the "import {module}" statements in the
+rest of our code will find our pre-loaded compatibility module.
+"""
+
+__revision__ = "src/engine/SCons/compat/__init__.py 3842 2008/12/20 22:59:52 scons"
+
+def import_as(module, name):
+ """
+ Imports the specified module (from our local directory) as the
+ specified name.
+ """
+ import imp
+ import os.path
+ dir = os.path.split(__file__)[0]
+ file, filename, suffix_mode_type = imp.find_module(module, [dir])
+ imp.load_module(name, file, filename, suffix_mode_type)
+
+import builtins
+
+try:
+ import hashlib
+except ImportError:
+ # Pre-2.5 Python has no hashlib module.
+ try:
+ import_as('_scons_hashlib', 'hashlib')
+ except ImportError:
+ # If we failed importing our compatibility module, it probably
+ # means this version of Python has no md5 module. Don't do
+ # anything and let the higher layer discover this fact, so it
+ # can fall back to using timestamp.
+ pass
+
+try:
+ set
+except NameError:
+ # Pre-2.4 Python has no native set type
+ try:
+ # Python 2.2 and 2.3 can use the copy of the 2.[45] sets module
+ # that we grabbed.
+ import_as('_scons_sets', 'sets')
+ except (ImportError, SyntaxError):
+ # Python 1.5 (ImportError, no __future__ module) and 2.1
+ # (SyntaxError, no generators in __future__) will blow up
+ # trying to import the 2.[45] sets module, so back off to a
+ # custom sets module that can be discarded easily when we
+ # stop supporting those versions.
+ import_as('_scons_sets15', 'sets')
+ import __builtin__
+ import sets
+ __builtin__.set = sets.Set
+
+import fnmatch
+try:
+ fnmatch.filter
+except AttributeError:
+ # Pre-2.2 Python has no fnmatch.filter() function.
+ def filter(names, pat):
+ """Return the subset of the list NAMES that match PAT"""
+ import os,posixpath
+ result=[]
+ pat = os.path.normcase(pat)
+ if not fnmatch._cache.has_key(pat):
+ import re
+ res = fnmatch.translate(pat)
+ fnmatch._cache[pat] = re.compile(res)
+ match = fnmatch._cache[pat].match
+ if os.path is posixpath:
+ # normcase on posix is NOP. Optimize it away from the loop.
+ for name in names:
+ if match(name):
+ result.append(name)
+ else:
+ for name in names:
+ if match(os.path.normcase(name)):
+ result.append(name)
+ return result
+ fnmatch.filter = filter
+ del filter
+
+try:
+ import itertools
+except ImportError:
+ # Pre-2.3 Python has no itertools module.
+ import_as('_scons_itertools', 'itertools')
+
+# If we need the compatibility version of textwrap, it must be imported
+# before optparse, which uses it.
+try:
+ import textwrap
+except ImportError:
+ # Pre-2.3 Python has no textwrap module.
+ import_as('_scons_textwrap', 'textwrap')
+
+try:
+ import optparse
+except ImportError:
+ # Pre-2.3 Python has no optparse module.
+ import_as('_scons_optparse', 'optparse')
+
+import os
+try:
+ os.devnull
+except AttributeError:
+ # Pre-2.4 Python has no os.devnull attribute
+ import sys
+ _names = sys.builtin_module_names
+ if 'posix' in _names:
+ os.devnull = '/dev/null'
+ elif 'nt' in _names:
+ os.devnull = 'nul'
+ os.path.devnull = os.devnull
+
+import shlex
+try:
+ shlex.split
+except AttributeError:
+ # Pre-2.3 Python has no shlex.split() function.
+ #
+ # The full white-space splitting semantics of shlex.split() are
+ # complicated to reproduce by hand, so just use a compatibility
+ # version of the shlex module cribbed from Python 2.5 with some
+ # minor modifications for older Python versions.
+ del shlex
+ import_as('_scons_shlex', 'shlex')
+
+
+import shutil
+try:
+ shutil.move
+except AttributeError:
+ # Pre-2.3 Python has no shutil.move() function.
+ #
+ # Cribbed from Python 2.5.
+ import os
+
+ def move(src, dst):
+ """Recursively move a file or directory to another location.
+
+ If the destination is on our current filesystem, then simply use
+ rename. Otherwise, copy src to the dst and then remove src.
+ A lot more could be done here... A look at mv.c shows a lot of
+ the issues this implementation glosses over.
+
+ """
+ try:
+ os.rename(src, dst)
+ except OSError:
+ if os.path.isdir(src):
+ if shutil.destinsrc(src, dst):
+ raise Error, "Cannot move a directory '%s' into itself '%s'." % (src, dst)
+ shutil.copytree(src, dst, symlinks=True)
+ shutil.rmtree(src)
+ else:
+ shutil.copy2(src,dst)
+ os.unlink(src)
+ shutil.move = move
+ del move
+
+ def destinsrc(src, dst):
+ src = os.path.abspath(src)
+ return os.path.abspath(dst)[:len(src)] == src
+ shutil.destinsrc = destinsrc
+ del destinsrc
+
+
+try:
+ import subprocess
+except ImportError:
+ # Pre-2.4 Python has no subprocess module.
+ import_as('_scons_subprocess', 'subprocess')
+
+import sys
+try:
+ sys.version_info
+except AttributeError:
+ # Pre-1.6 Python has no sys.version_info
+ import string
+ version_string = string.split(sys.version)[0]
+ version_ints = map(int, string.split(version_string, '.'))
+ sys.version_info = tuple(version_ints + ['final', 0])
+
+try:
+ import UserString
+except ImportError:
+ # Pre-1.6 Python has no UserString module.
+ import_as('_scons_UserString', 'UserString')
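A quick sketch of the guarantees the module above establishes once it has been imported (directly or as a side effect of 'import SCons'); the calls are purely illustrative:

    import SCons.compat
    import os, shutil, fnmatch, subprocess
    # each of these is available afterwards, natively or via a shim
    os.devnull
    shutil.move
    fnmatch.filter(['a.c', 'b.h'], '*.c')   # -> ['a.c']
    set(['x', 'y'])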
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_UserString.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_UserString.py
new file mode 100644
index 000000000..cde813d9d
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_UserString.py
@@ -0,0 +1,92 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/compat/_scons_UserString.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """
+A user-defined wrapper around string objects
+
+This class is "borrowed" from the Python 2.2 UserString and modified
+slightly for use with SCons. It is *NOT* guaranteed to be fully compliant
+with the standard UserString class from all later versions of Python.
+In particular, it does not necessarily contain all of the methods found
+in later versions.
+"""
+
+import types
+
+StringType = types.StringType
+
+if hasattr(types, 'UnicodeType'):
+ UnicodeType = types.UnicodeType
+ def is_String(obj):
+ return type(obj) in (StringType, UnicodeType)
+else:
+ def is_String(obj):
+ return type(obj) is StringType
+
+class UserString:
+ def __init__(self, seq):
+ if is_String(seq):
+ self.data = seq
+ elif isinstance(seq, UserString):
+ self.data = seq.data[:]
+ else:
+ self.data = str(seq)
+ def __str__(self): return str(self.data)
+ def __repr__(self): return repr(self.data)
+ def __int__(self): return int(self.data)
+ def __long__(self): return long(self.data)
+ def __float__(self): return float(self.data)
+ def __complex__(self): return complex(self.data)
+ def __hash__(self): return hash(self.data)
+
+ def __cmp__(self, string):
+ if isinstance(string, UserString):
+ return cmp(self.data, string.data)
+ else:
+ return cmp(self.data, string)
+ def __contains__(self, char):
+ return char in self.data
+
+ def __len__(self): return len(self.data)
+ def __getitem__(self, index): return self.__class__(self.data[index])
+ def __getslice__(self, start, end):
+ start = max(start, 0); end = max(end, 0)
+ return self.__class__(self.data[start:end])
+
+ def __add__(self, other):
+ if isinstance(other, UserString):
+ return self.__class__(self.data + other.data)
+ elif is_String(other):
+ return self.__class__(self.data + other)
+ else:
+ return self.__class__(self.data + str(other))
+ def __radd__(self, other):
+ if is_String(other):
+ return self.__class__(other + self.data)
+ else:
+ return self.__class__(str(other) + self.data)
+ def __mul__(self, n):
+ return self.__class__(self.data*n)
+ __rmul__ = __mul__
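A tiny illustration of the wrapper's behaviour, using only the methods defined above:

    s = UserString('hello')
    assert s == 'hello'                     # __cmp__ against plain strings
    assert (s + ', world')[0:5] == 'hello'  # __add__ and __getslice__
    assert len(s * 2) == 10                 # __mul__ and __len__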
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_hashlib.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_hashlib.py
new file mode 100644
index 000000000..97bf8d94e
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_hashlib.py
@@ -0,0 +1,85 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__doc__ = """
+hashlib backwards-compatibility module for older (pre-2.5) Python versions
+
+This does not (repeat, *NOT*) provide complete hashlib
+functionality. It only wraps the portions of MD5 functionality used
+by SCons, in an interface that looks like hashlib (or enough for our
+purposes, anyway). In fact, this module will raise an ImportError if
+the underlying md5 module isn't available.
+"""
+
+__revision__ = "src/engine/SCons/compat/_scons_hashlib.py 3842 2008/12/20 22:59:52 scons"
+
+import md5
+import string
+
+class md5obj:
+
+ md5_module = md5
+
+ def __init__(self, name, string=''):
+ if name not in ('MD5', 'md5'):
+ raise ValueError, "unsupported hash type"
+ self.name = 'md5'
+ self.m = self.md5_module.md5()
+ # feed any initial data, matching the hashlib constructor signature
+ if string:
+ self.m.update(string)
+
+ def __repr__(self):
+ return '<%s HASH object @ %#x>' % (self.name, id(self))
+
+ def copy(self):
+ import copy
+ result = copy.copy(self)
+ result.m = self.m.copy()
+ return result
+
+ def digest(self):
+ return self.m.digest()
+
+ def update(self, arg):
+ return self.m.update(arg)
+
+ if hasattr(md5.md5(), 'hexdigest'):
+
+ def hexdigest(self):
+ return self.m.hexdigest()
+
+ else:
+
+ # Objects created by the underlying md5 module have no native
+ # hexdigest() method (*cough* 1.5.2 *cough*), so provide an
+ # equivalent lifted from elsewhere.
+ def hexdigest(self):
+ h = string.hexdigits
+ r = ''
+ for c in self.digest():
+ i = ord(c)
+ r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
+ return r
+
+new = md5obj
+
+def md5(string=''):
+ return md5obj('md5', string)
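A short sketch of the wrapper in use, mirroring how the real hashlib module is called; the content string is illustrative:

    import hashlib             # resolves to this module on pre-2.5 Pythons
    h = hashlib.md5()
    h.update('some file contents')
    signature = h.hexdigest()  # 32-character hex string
    h2 = h.copy()              # independent copy of the current state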
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_itertools.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_itertools.py
new file mode 100644
index 000000000..145a7f9cf
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_itertools.py
@@ -0,0 +1,118 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/compat/_scons_itertools.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """
+Implementations of itertools functions for Python versions that don't
+have iterators.
+
+These implement the functions by creating the entire list, not returning
+it element-by-element as the real itertools functions do. This means
+that early Python versions won't get the performance benefit of
+iterators, but we can still write code that uses itertools, and later
+Python versions will get the advantages of the real iterator versions.
+
+Because we return the entire list, we intentionally do not implement the
+itertools functions that "return" infinitely-long lists: the count(),
+cycle() and repeat() functions. Other functions below have remained
+unimplemented simply because they aren't being used (yet) and it wasn't
+obvious how to do it. Or, conversely, we only implemented those functions
+that *were* easy to implement (mostly because the Python documentation
+contained examples of equivalent code).
+
+Note that these do not have independent unit tests, so it's possible
+that there are bugs.
+"""
+
+def chain(*iterables):
+ result = []
+ for x in iterables:
+ result.extend(list(x))
+ return result
+
+def count(n=0):
+ # returns infinite length, should not be supported
+ raise NotImplementedError
+
+def cycle(iterable):
+ # returns infinite length, should not be supported
+ raise NotImplementedError
+
+def dropwhile(predicate, iterable):
+ # drop leading elements while the predicate holds, then keep the rest
+ result = []
+ dropping = 1
+ for x in iterable:
+ if dropping:
+ if predicate(x):
+ continue
+ dropping = 0
+ result.append(x)
+ return result
+
+def groupby(iterable, *args):
+ raise NotImplementedError
+
+def ifilter(predicate, iterable):
+ result = []
+ if predicate is None:
+ predicate = bool
+ for x in iterable:
+ if predicate(x):
+ result.append(x)
+ return result
+
+def ifilterfalse(predicate, iterable):
+ result = []
+ if predicate is None:
+ predicate = bool
+ for x in iterable:
+ if not predicate(x):
+ result.append(x)
+ return result
+
+def imap(function, *iterables):
+ return apply(map, (function,) + tuple(iterables))
+
+def islice(*args, **kw):
+ raise NotImplementedError
+
+def izip(*iterables):
+ return apply(zip, iterables)
+
+def repeat(*args, **kw):
+ # returns infinite length, should not be supported
+ raise NotImplementedError
+
+def starmap(*args, **kw):
+ raise NotImplementedError
+
+def takewhile(predicate, iterable):
+ result = []
+ for x in iterable:
+ if predicate(x):
+ result.append(x)
+ else:
+ break
+ return result
+
+def tee(*args, **kw):
+ raise NotImplementedError
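Because these substitutes build complete lists rather than lazy iterators, a rough equivalence (with illustrative values) is:

    assert chain([1, 2], [3]) == [1, 2, 3]
    assert ifilter(None, [0, 1, '', 'x']) == [1, 'x']
    assert izip('ab', [1, 2]) == [('a', 1), ('b', 2)]
    assert takewhile(lambda x: x < 3, [1, 2, 3, 1]) == [1, 2]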
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_optparse.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_optparse.py
new file mode 100644
index 000000000..6b376875e
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_optparse.py
@@ -0,0 +1,1719 @@
+"""optparse - a powerful, extensible, and easy-to-use option parser.
+
+By Greg Ward <gward@python.net>
+
+Originally distributed as Optik; see http://optik.sourceforge.net/ .
+
+If you have problems with this module, please do not file bugs,
+patches, or feature requests with Python; instead, use Optik's
+SourceForge project page:
+ http://sourceforge.net/projects/optik
+
+For support, use the optik-users@lists.sourceforge.net mailing list
+(http://lists.sourceforge.net/lists/listinfo/optik-users).
+"""
+
+# Python developers: please do not make changes to this file, since
+# it is automatically generated from the Optik source code.
+
+__version__ = "1.5.3"
+
+__all__ = ['Option',
+ 'SUPPRESS_HELP',
+ 'SUPPRESS_USAGE',
+ 'Values',
+ 'OptionContainer',
+ 'OptionGroup',
+ 'OptionParser',
+ 'HelpFormatter',
+ 'IndentedHelpFormatter',
+ 'TitledHelpFormatter',
+ 'OptParseError',
+ 'OptionError',
+ 'OptionConflictError',
+ 'OptionValueError',
+ 'BadOptionError']
+
+__copyright__ = """
+Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved.
+Copyright (c) 2002-2006 Python Software Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ * Neither the name of the author nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import string
+import sys, os
+import types
+import textwrap
+
+def _repr(self):
+ return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), self)
+
+
+try:
+ sys.getdefaultencoding
+except AttributeError:
+ def fake_getdefaultencoding():
+ return None
+ sys.getdefaultencoding = fake_getdefaultencoding
+
+try:
+ ''.encode
+except AttributeError:
+ def encode_wrapper(s, encoding, replacement):
+ return s
+else:
+ def encode_wrapper(s, encoding, replacement):
+ return s.encode(encoding, replacement)
+
+
+# This file was generated from:
+# Id: option_parser.py 527 2006-07-23 15:21:30Z greg
+# Id: option.py 522 2006-06-11 16:22:03Z gward
+# Id: help.py 527 2006-07-23 15:21:30Z greg
+# Id: errors.py 509 2006-04-20 00:58:24Z gward
+
+try:
+ from gettext import gettext
+except ImportError:
+ def gettext(message):
+ return message
+_ = gettext
+
+
+class OptParseError (Exception):
+ def __init__(self, msg):
+ self.msg = msg
+
+ def __str__(self):
+ return self.msg
+
+
+class OptionError (OptParseError):
+ """
+ Raised if an Option instance is created with invalid or
+ inconsistent arguments.
+ """
+
+ def __init__(self, msg, option):
+ self.msg = msg
+ self.option_id = str(option)
+
+ def __str__(self):
+ if self.option_id:
+ return "option %s: %s" % (self.option_id, self.msg)
+ else:
+ return self.msg
+
+class OptionConflictError (OptionError):
+ """
+ Raised if conflicting options are added to an OptionParser.
+ """
+
+class OptionValueError (OptParseError):
+ """
+ Raised if an invalid option value is encountered on the command
+ line.
+ """
+
+class BadOptionError (OptParseError):
+ """
+ Raised if an invalid option is seen on the command line.
+ """
+ def __init__(self, opt_str):
+ self.opt_str = opt_str
+
+ def __str__(self):
+ return _("no such option: %s") % self.opt_str
+
+class AmbiguousOptionError (BadOptionError):
+ """
+ Raised if an ambiguous option is seen on the command line.
+ """
+ def __init__(self, opt_str, possibilities):
+ BadOptionError.__init__(self, opt_str)
+ self.possibilities = possibilities
+
+ def __str__(self):
+ return (_("ambiguous option: %s (%s?)")
+ % (self.opt_str, string.join(self.possibilities, ", ")))
+
+
+class HelpFormatter:
+
+ """
+ Abstract base class for formatting option help. OptionParser
+ instances should use one of the HelpFormatter subclasses for
+ formatting help; by default IndentedHelpFormatter is used.
+
+ Instance attributes:
+ parser : OptionParser
+ the controlling OptionParser instance
+ indent_increment : int
+ the number of columns to indent per nesting level
+ max_help_position : int
+ the maximum starting column for option help text
+ help_position : int
+ the calculated starting column for option help text;
+ initially the same as the maximum
+ width : int
+ total number of columns for output (pass None to constructor for
+ this value to be taken from the $COLUMNS environment variable)
+ level : int
+ current indentation level
+ current_indent : int
+ current indentation level (in columns)
+ help_width : int
+ number of columns available for option help text (calculated)
+ default_tag : str
+ text to replace with each option's default value, "%default"
+ by default. Set to false value to disable default value expansion.
+ option_strings : { Option : str }
+ maps Option instances to the snippet of help text explaining
+ the syntax of that option, e.g. "-h, --help" or
+ "-fFILE, --file=FILE"
+ _short_opt_fmt : str
+ format string controlling how short options with values are
+ printed in help text. Must be either "%s%s" ("-fFILE") or
+ "%s %s" ("-f FILE"), because those are the two syntaxes that
+ Optik supports.
+ _long_opt_fmt : str
+ similar but for long options; must be either "%s %s" ("--file FILE")
+ or "%s=%s" ("--file=FILE").
+ """
+
+ NO_DEFAULT_VALUE = "none"
+
+ def __init__(self,
+ indent_increment,
+ max_help_position,
+ width,
+ short_first):
+ self.parser = None
+ self.indent_increment = indent_increment
+ self.help_position = self.max_help_position = max_help_position
+ if width is None:
+ try:
+ width = int(os.environ['COLUMNS'])
+ except (KeyError, ValueError):
+ width = 80
+ width = width - 2
+ self.width = width
+ self.current_indent = 0
+ self.level = 0
+ self.help_width = None # computed later
+ self.short_first = short_first
+ self.default_tag = "%default"
+ self.option_strings = {}
+ self._short_opt_fmt = "%s %s"
+ self._long_opt_fmt = "%s=%s"
+
+ def set_parser(self, parser):
+ self.parser = parser
+
+ def set_short_opt_delimiter(self, delim):
+ if delim not in ("", " "):
+ raise ValueError(
+ "invalid metavar delimiter for short options: %r" % delim)
+ self._short_opt_fmt = "%s" + delim + "%s"
+
+ def set_long_opt_delimiter(self, delim):
+ if delim not in ("=", " "):
+ raise ValueError(
+ "invalid metavar delimiter for long options: %r" % delim)
+ self._long_opt_fmt = "%s" + delim + "%s"
+
+ def indent(self):
+ self.current_indent = self.current_indent + self.indent_increment
+ self.level = self.level + 1
+
+ def dedent(self):
+ self.current_indent = self.current_indent - self.indent_increment
+ assert self.current_indent >= 0, "Indent decreased below 0."
+ self.level = self.level - 1
+
+ def format_usage(self, usage):
+ raise NotImplementedError, "subclasses must implement"
+
+ def format_heading(self, heading):
+ raise NotImplementedError, "subclasses must implement"
+
+ def _format_text(self, text):
+ """
+ Format a paragraph of free-form text for inclusion in the
+ help output at the current indentation level.
+ """
+ text_width = self.width - self.current_indent
+ indent = " "*self.current_indent
+ return textwrap.fill(text,
+ text_width,
+ initial_indent=indent,
+ subsequent_indent=indent)
+
+ def format_description(self, description):
+ if description:
+ return self._format_text(description) + "\n"
+ else:
+ return ""
+
+ def format_epilog(self, epilog):
+ if epilog:
+ return "\n" + self._format_text(epilog) + "\n"
+ else:
+ return ""
+
+
+ def expand_default(self, option):
+ if self.parser is None or not self.default_tag:
+ return option.help
+
+ default_value = self.parser.defaults.get(option.dest)
+ if default_value is NO_DEFAULT or default_value is None:
+ default_value = self.NO_DEFAULT_VALUE
+
+ return string.replace(option.help, self.default_tag, str(default_value))
+
+ def format_option(self, option):
+ # The help for each option consists of two parts:
+ # * the opt strings and metavars
+ # eg. ("-x", or "-fFILENAME, --file=FILENAME")
+ # * the user-supplied help string
+ # eg. ("turn on expert mode", "read data from FILENAME")
+ #
+ # If possible, we write both of these on the same line:
+ # -x turn on expert mode
+ #
+ # But if the opt string list is too long, we put the help
+ # string on a second line, indented to the same column it would
+ # start in if it fit on the first line.
+ # -fFILENAME, --file=FILENAME
+ # read data from FILENAME
+ result = []
+ opts = self.option_strings[option]
+ opt_width = self.help_position - self.current_indent - 2
+ if len(opts) > opt_width:
+ opts = "%*s%s\n" % (self.current_indent, "", opts)
+ indent_first = self.help_position
+ else: # start help on same line as opts
+ opts = "%*s%-*s " % (self.current_indent, "", opt_width, opts)
+ indent_first = 0
+ result.append(opts)
+ if option.help:
+ help_text = self.expand_default(option)
+ help_lines = textwrap.wrap(help_text, self.help_width)
+ result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
+ for line in help_lines[1:]:
+ result.append("%*s%s\n" % (self.help_position, "", line))
+ elif opts[-1] != "\n":
+ result.append("\n")
+ return string.join(result, "")
+
+ def store_option_strings(self, parser):
+ self.indent()
+ max_len = 0
+ for opt in parser.option_list:
+ strings = self.format_option_strings(opt)
+ self.option_strings[opt] = strings
+ max_len = max(max_len, len(strings) + self.current_indent)
+ self.indent()
+ for group in parser.option_groups:
+ for opt in group.option_list:
+ strings = self.format_option_strings(opt)
+ self.option_strings[opt] = strings
+ max_len = max(max_len, len(strings) + self.current_indent)
+ self.dedent()
+ self.dedent()
+ self.help_position = min(max_len + 2, self.max_help_position)
+ self.help_width = self.width - self.help_position
+
+ def format_option_strings(self, option):
+ """Return a comma-separated list of option strings & metavariables."""
+ if option.takes_value():
+ metavar = option.metavar or string.upper(option.dest)
+ short_opts = []
+ for sopt in option._short_opts:
+ short_opts.append(self._short_opt_fmt % (sopt, metavar))
+ long_opts = []
+ for lopt in option._long_opts:
+ long_opts.append(self._long_opt_fmt % (lopt, metavar))
+ else:
+ short_opts = option._short_opts
+ long_opts = option._long_opts
+
+ if self.short_first:
+ opts = short_opts + long_opts
+ else:
+ opts = long_opts + short_opts
+
+ return string.join(opts, ", ")
+
+class IndentedHelpFormatter (HelpFormatter):
+ """Format help with indented section bodies.
+ """
+
+ def __init__(self,
+ indent_increment=2,
+ max_help_position=24,
+ width=None,
+ short_first=1):
+ HelpFormatter.__init__(
+ self, indent_increment, max_help_position, width, short_first)
+
+ def format_usage(self, usage):
+ return _("Usage: %s\n") % usage
+
+ def format_heading(self, heading):
+ return "%*s%s:\n" % (self.current_indent, "", heading)
+
+
+class TitledHelpFormatter (HelpFormatter):
+ """Format help with underlined section headers.
+ """
+
+ def __init__(self,
+ indent_increment=0,
+ max_help_position=24,
+ width=None,
+ short_first=0):
+ HelpFormatter.__init__ (
+ self, indent_increment, max_help_position, width, short_first)
+
+ def format_usage(self, usage):
+ return "%s %s\n" % (self.format_heading(_("Usage")), usage)
+
+ def format_heading(self, heading):
+ return "%s\n%s\n" % (heading, "=-"[self.level] * len(heading))
+
+
+def _parse_num(val, type):
+ if string.lower(val[:2]) == "0x": # hexadecimal
+ radix = 16
+ elif string.lower(val[:2]) == "0b": # binary
+ radix = 2
+ val = val[2:] or "0" # have to remove "0b" prefix
+ elif val[:1] == "0": # octal
+ radix = 8
+ else: # decimal
+ radix = 10
+
+ return type(val, radix)
+
+def _parse_int(val):
+ return _parse_num(val, int)
+
+def _parse_long(val):
+ return _parse_num(val, long)
+
+try:
+ int('0', 10)
+except TypeError:
+ # Python 1.5.2 doesn't allow a radix value to be passed to int().
+ _parse_int = int
+
+try:
+ long('0', 10)
+except TypeError:
+ # Python 1.5.2 doesn't allow a radix value to be passed to long().
+ _parse_long = long
+
+_builtin_cvt = { "int" : (_parse_int, _("integer")),
+ "long" : (_parse_long, _("long integer")),
+ "float" : (float, _("floating-point")),
+ "complex" : (complex, _("complex")) }
+
+def check_builtin(option, opt, value):
+ (cvt, what) = _builtin_cvt[option.type]
+ try:
+ return cvt(value)
+ except ValueError:
+ raise OptionValueError(
+ _("option %s: invalid %s value: %r") % (opt, what, value))
+
+def check_choice(option, opt, value):
+ if value in option.choices:
+ return value
+ else:
+ choices = string.join(map(repr, option.choices), ", ")
+ raise OptionValueError(
+ _("option %s: invalid choice: %r (choose from %s)")
+ % (opt, value, choices))
+
+# Not supplying a default is different from a default of None,
+# so we need an explicit "not supplied" value.
+NO_DEFAULT = ("NO", "DEFAULT")
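A quick sketch of what the numeric conversion helpers above accept; the values are illustrative and assume the _parse_num() path (very old Pythons fall back to the builtin int):

    assert _parse_int('42') == 42       # decimal
    assert _parse_int('0x1f') == 31     # hexadecimal
    assert _parse_int('0b101') == 5     # binary ('0b' prefix is stripped)
    assert _parse_int('017') == 15      # a leading 0 means octal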
+
+
+class Option:
+ """
+ Instance attributes:
+ _short_opts : [string]
+ _long_opts : [string]
+
+ action : string
+ type : string
+ dest : string
+ default : any
+ nargs : int
+ const : any
+ choices : [string]
+ callback : function
+ callback_args : (any*)
+ callback_kwargs : { string : any }
+ help : string
+ metavar : string
+ """
+
+ # The list of instance attributes that may be set through
+ # keyword args to the constructor.
+ ATTRS = ['action',
+ 'type',
+ 'dest',
+ 'default',
+ 'nargs',
+ 'const',
+ 'choices',
+ 'callback',
+ 'callback_args',
+ 'callback_kwargs',
+ 'help',
+ 'metavar']
+
+ # The set of actions allowed by option parsers. Explicitly listed
+ # here so the constructor can validate its arguments.
+ ACTIONS = ("store",
+ "store_const",
+ "store_true",
+ "store_false",
+ "append",
+ "append_const",
+ "count",
+ "callback",
+ "help",
+ "version")
+
+ # The set of actions that involve storing a value somewhere;
+ # also listed just for constructor argument validation. (If
+ # the action is one of these, there must be a destination.)
+ STORE_ACTIONS = ("store",
+ "store_const",
+ "store_true",
+ "store_false",
+ "append",
+ "append_const",
+ "count")
+
+ # The set of actions for which it makes sense to supply a value
+ # type, ie. which may consume an argument from the command line.
+ TYPED_ACTIONS = ("store",
+ "append",
+ "callback")
+
+ # The set of actions which *require* a value type, ie. that
+ # always consume an argument from the command line.
+ ALWAYS_TYPED_ACTIONS = ("store",
+ "append")
+
+ # The set of actions which take a 'const' attribute.
+ CONST_ACTIONS = ("store_const",
+ "append_const")
+
+ # The set of known types for option parsers. Again, listed here for
+ # constructor argument validation.
+ TYPES = ("string", "int", "long", "float", "complex", "choice")
+
+ # Dictionary of argument checking functions, which convert and
+ # validate option arguments according to the option type.
+ #
+ # Signature of checking functions is:
+ # check(option : Option, opt : string, value : string) -> any
+ # where
+ # option is the Option instance calling the checker
+ # opt is the actual option seen on the command-line
+ # (eg. "-a", "--file")
+ # value is the option argument seen on the command-line
+ #
+ # The return value should be in the appropriate Python type
+ # for option.type -- eg. an integer if option.type == "int".
+ #
+ # If no checker is defined for a type, arguments will be
+ # unchecked and remain strings.
+ TYPE_CHECKER = { "int" : check_builtin,
+ "long" : check_builtin,
+ "float" : check_builtin,
+ "complex": check_builtin,
+ "choice" : check_choice,
+ }
+
+
+ # CHECK_METHODS is a list of unbound method objects; they are called
+ # by the constructor, in order, after all attributes are
+ # initialized. The list is created and filled in later, after all
+ # the methods are actually defined. (I just put it here because I
+ # like to define and document all class attributes in the same
+ # place.) Subclasses that add another _check_*() method should
+ # define their own CHECK_METHODS list that adds their check method
+ # to those from this class.
+ CHECK_METHODS = None
+
+
+ # -- Constructor/initialization methods ----------------------------
+
+ def __init__(self, *opts, **attrs):
+ # Set _short_opts, _long_opts attrs from 'opts' tuple.
+ # Have to be set now, in case no option strings are supplied.
+ self._short_opts = []
+ self._long_opts = []
+ opts = self._check_opt_strings(opts)
+ self._set_opt_strings(opts)
+
+ # Set all other attrs (action, type, etc.) from 'attrs' dict
+ self._set_attrs(attrs)
+
+ # Check all the attributes we just set. There are lots of
+ # complicated interdependencies, but luckily they can be farmed
+ # out to the _check_*() methods listed in CHECK_METHODS -- which
+ # could be handy for subclasses! The one thing these all share
+ # is that they raise OptionError if they discover a problem.
+ for checker in self.CHECK_METHODS:
+ checker(self)
+
+ def _check_opt_strings(self, opts):
+ # Filter out None because early versions of Optik had exactly
+ # one short option and one long option, either of which
+ # could be None.
+ opts = filter(None, opts)
+ if not opts:
+ raise TypeError("at least one option string must be supplied")
+ return opts
+
+ def _set_opt_strings(self, opts):
+ for opt in opts:
+ if len(opt) < 2:
+ raise OptionError(
+ "invalid option string %r: "
+ "must be at least two characters long" % opt, self)
+ elif len(opt) == 2:
+ if not (opt[0] == "-" and opt[1] != "-"):
+ raise OptionError(
+ "invalid short option string %r: "
+ "must be of the form -x, (x any non-dash char)" % opt,
+ self)
+ self._short_opts.append(opt)
+ else:
+ if not (opt[0:2] == "--" and opt[2] != "-"):
+ raise OptionError(
+ "invalid long option string %r: "
+ "must start with --, followed by non-dash" % opt,
+ self)
+ self._long_opts.append(opt)
+
+ def _set_attrs(self, attrs):
+ for attr in self.ATTRS:
+ if attrs.has_key(attr):
+ setattr(self, attr, attrs[attr])
+ del attrs[attr]
+ else:
+ if attr == 'default':
+ setattr(self, attr, NO_DEFAULT)
+ else:
+ setattr(self, attr, None)
+ if attrs:
+ attrs = attrs.keys()
+ attrs.sort()
+ raise OptionError(
+ "invalid keyword arguments: %s" % string.join(attrs, ", "),
+ self)
+
+
+ # -- Constructor validation methods --------------------------------
+
+ def _check_action(self):
+ if self.action is None:
+ self.action = "store"
+ elif self.action not in self.ACTIONS:
+ raise OptionError("invalid action: %r" % self.action, self)
+
+ def _check_type(self):
+ if self.type is None:
+ if self.action in self.ALWAYS_TYPED_ACTIONS:
+ if self.choices is not None:
+ # The "choices" attribute implies "choice" type.
+ self.type = "choice"
+ else:
+ # No type given? "string" is the most sensible default.
+ self.type = "string"
+ else:
+ # Allow type objects or builtin type conversion functions
+ # (int, str, etc.) as an alternative to their names. (The
+ # complicated check of __builtin__ is only necessary for
+ # Python 2.1 and earlier, and is short-circuited by the
+ # first check on modern Pythons.)
+ import __builtin__
+ if ( type(self.type) is types.TypeType or
+ (hasattr(self.type, "__name__") and
+ getattr(__builtin__, self.type.__name__, None) is self.type) ):
+ self.type = self.type.__name__
+
+ if self.type == "str":
+ self.type = "string"
+
+ if self.type not in self.TYPES:
+ raise OptionError("invalid option type: %r" % self.type, self)
+ if self.action not in self.TYPED_ACTIONS:
+ raise OptionError(
+ "must not supply a type for action %r" % self.action, self)
+
+ def _check_choice(self):
+ if self.type == "choice":
+ if self.choices is None:
+ raise OptionError(
+ "must supply a list of choices for type 'choice'", self)
+ elif type(self.choices) not in (types.TupleType, types.ListType):
+ raise OptionError(
+ "choices must be a list of strings ('%s' supplied)"
+ % string.split(str(type(self.choices)), "'")[1], self)
+ elif self.choices is not None:
+ raise OptionError(
+ "must not supply choices for type %r" % self.type, self)
+
+ def _check_dest(self):
+ # No destination given, and we need one for this action. The
+ # self.type check is for callbacks that take a value.
+ takes_value = (self.action in self.STORE_ACTIONS or
+ self.type is not None)
+ if self.dest is None and takes_value:
+
+ # Glean a destination from the first long option string,
+ # or from the first short option string if no long options.
+ if self._long_opts:
+ # eg. "--foo-bar" -> "foo_bar"
+ self.dest = string.replace(self._long_opts[0][2:], '-', '_')
+ else:
+ self.dest = self._short_opts[0][1]
+
+ def _check_const(self):
+ if self.action not in self.CONST_ACTIONS and self.const is not None:
+ raise OptionError(
+ "'const' must not be supplied for action %r" % self.action,
+ self)
+
+ def _check_nargs(self):
+ if self.action in self.TYPED_ACTIONS:
+ if self.nargs is None:
+ self.nargs = 1
+ elif self.nargs is not None:
+ raise OptionError(
+ "'nargs' must not be supplied for action %r" % self.action,
+ self)
+
+ def _check_callback(self):
+ if self.action == "callback":
+ if not callable(self.callback):
+ raise OptionError(
+ "callback not callable: %r" % self.callback, self)
+ if (self.callback_args is not None and
+ type(self.callback_args) is not types.TupleType):
+ raise OptionError(
+ "callback_args, if supplied, must be a tuple: not %r"
+ % self.callback_args, self)
+ if (self.callback_kwargs is not None and
+ type(self.callback_kwargs) is not types.DictType):
+ raise OptionError(
+ "callback_kwargs, if supplied, must be a dict: not %r"
+ % self.callback_kwargs, self)
+ else:
+ if self.callback is not None:
+ raise OptionError(
+ "callback supplied (%r) for non-callback option"
+ % self.callback, self)
+ if self.callback_args is not None:
+ raise OptionError(
+ "callback_args supplied for non-callback option", self)
+ if self.callback_kwargs is not None:
+ raise OptionError(
+ "callback_kwargs supplied for non-callback option", self)
+
+
+ CHECK_METHODS = [_check_action,
+ _check_type,
+ _check_choice,
+ _check_dest,
+ _check_const,
+ _check_nargs,
+ _check_callback]
+
+
+ # -- Miscellaneous methods -----------------------------------------
+
+ def __str__(self):
+ return string.join(self._short_opts + self._long_opts, "/")
+
+ __repr__ = _repr
+
+ def takes_value(self):
+ return self.type is not None
+
+ def get_opt_string(self):
+ if self._long_opts:
+ return self._long_opts[0]
+ else:
+ return self._short_opts[0]
+
+
+ # -- Processing methods --------------------------------------------
+
+ def check_value(self, opt, value):
+ checker = self.TYPE_CHECKER.get(self.type)
+ if checker is None:
+ return value
+ else:
+ return checker(self, opt, value)
+
+ def convert_value(self, opt, value):
+ if value is not None:
+ if self.nargs == 1:
+ return self.check_value(opt, value)
+ else:
+ return tuple(map(lambda v, o=opt, s=self: s.check_value(o, v), value))
+
+ def process(self, opt, value, values, parser):
+
+ # First, convert the value(s) to the right type. Howl if any
+ # value(s) are bogus.
+ value = self.convert_value(opt, value)
+
+ # And then take whatever action is expected of us.
+ # This is a separate method to make life easier for
+ # subclasses to add new actions.
+ return self.take_action(
+ self.action, self.dest, opt, value, values, parser)
+
+ def take_action(self, action, dest, opt, value, values, parser):
+ if action == "store":
+ setattr(values, dest, value)
+ elif action == "store_const":
+ setattr(values, dest, self.const)
+ elif action == "store_true":
+ setattr(values, dest, True)
+ elif action == "store_false":
+ setattr(values, dest, False)
+ elif action == "append":
+ values.ensure_value(dest, []).append(value)
+ elif action == "append_const":
+ values.ensure_value(dest, []).append(self.const)
+ elif action == "count":
+ setattr(values, dest, values.ensure_value(dest, 0) + 1)
+ elif action == "callback":
+ args = self.callback_args or ()
+ kwargs = self.callback_kwargs or {}
+ apply(self.callback, (self, opt, value, parser,) + args, kwargs)
+ elif action == "help":
+ parser.print_help()
+ parser.exit()
+ elif action == "version":
+ parser.print_version()
+ parser.exit()
+ else:
+ raise RuntimeError, "unknown action %r" % self.action
+
+ return 1
+
+# class Option
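+
+# Illustrative sketch, not part of the upstream Optik/optparse code: the
+# comment in Option.process() notes that take_action() is factored out so
+# that subclasses can add new actions.  The class below and its
+# "store_upper" action are hypothetical; they assume the usual Option class
+# attributes (ACTIONS, STORE_ACTIONS, TYPED_ACTIONS) and an option declared
+# with type="string" so that a value is passed in.
+class _ExampleUpperOption(Option):
+    ACTIONS = Option.ACTIONS + ("store_upper",)
+    STORE_ACTIONS = Option.STORE_ACTIONS + ("store_upper",)
+    TYPED_ACTIONS = Option.TYPED_ACTIONS + ("store_upper",)
+
+    def take_action(self, action, dest, opt, value, values, parser):
+        if action == "store_upper":
+            # Store the converted value upper-cased instead of verbatim.
+            setattr(values, dest, string.upper(value))
+            return 1
+        return Option.take_action(self, action, dest, opt, value,
+                                  values, parser)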
+
+
+SUPPRESS_HELP = "SUPPRESS"+"HELP"
+SUPPRESS_USAGE = "SUPPRESS"+"USAGE"
+
+# For compatibility with Python 2.2
+try:
+ True, False
+except NameError:
+ (True, False) = (1, 0)
+
+try:
+ types.UnicodeType
+except AttributeError:
+ def isbasestring(x):
+ return isinstance(x, types.StringType)
+else:
+ def isbasestring(x):
+ return isinstance(x, types.StringType) or isinstance(x, types.UnicodeType)
+
+class Values:
+
+ def __init__(self, defaults=None):
+ if defaults:
+ for (attr, val) in defaults.items():
+ setattr(self, attr, val)
+
+ def __str__(self):
+ return str(self.__dict__)
+
+ __repr__ = _repr
+
+ def __cmp__(self, other):
+ if isinstance(other, Values):
+ return cmp(self.__dict__, other.__dict__)
+ elif isinstance(other, types.DictType):
+ return cmp(self.__dict__, other)
+ else:
+ return -1
+
+ def _update_careful(self, dict):
+ """
+ Update the option values from an arbitrary dictionary, but only
+ use keys from dict that already have a corresponding attribute
+ in self. Any keys in dict without a corresponding attribute
+ are silently ignored.
+ """
+ for attr in dir(self):
+ if dict.has_key(attr):
+ dval = dict[attr]
+ if dval is not None:
+ setattr(self, attr, dval)
+
+ def _update_loose(self, dict):
+ """
+ Update the option values from an arbitrary dictionary,
+ using all keys from the dictionary regardless of whether
+ they have a corresponding attribute in self or not.
+ """
+ self.__dict__.update(dict)
+
+ def _update(self, dict, mode):
+ if mode == "careful":
+ self._update_careful(dict)
+ elif mode == "loose":
+ self._update_loose(dict)
+ else:
+ raise ValueError, "invalid update mode: %r" % mode
+
+ def read_module(self, modname, mode="careful"):
+ __import__(modname)
+ mod = sys.modules[modname]
+ self._update(vars(mod), mode)
+
+ def read_file(self, filename, mode="careful"):
+ vars = {}
+ execfile(filename, vars)
+ self._update(vars, mode)
+
+ def ensure_value(self, attr, value):
+ if not hasattr(self, attr) or getattr(self, attr) is None:
+ setattr(self, attr, value)
+ return getattr(self, attr)
+
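+# Illustrative sketch, not part of the upstream code: how a Values object
+# combines defaults, ensure_value() and "careful" updates.  The attribute
+# names and the helper below are hypothetical and never called.
+def _example_values():
+    vals = Values({"verbose": False, "output": None})
+    # ensure_value() only assigns when the attribute is missing or None.
+    vals.ensure_value("jobs", 1)         # vals.jobs becomes 1
+    vals.ensure_value("verbose", True)   # vals.verbose stays False
+    # A "careful" update ignores keys that are not existing attributes.
+    vals._update({"verbose": True, "unknown": 42}, "careful")
+    return vals                          # verbose=True, no 'unknown' attr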
+
+class OptionContainer:
+
+ """
+ Abstract base class.
+
+ Class attributes:
+ standard_option_list : [Option]
+ list of standard options that will be accepted by all instances
+ of this parser class (intended to be overridden by subclasses).
+
+ Instance attributes:
+ option_list : [Option]
+ the list of Option objects contained by this OptionContainer
+ _short_opt : { string : Option }
+ dictionary mapping short option strings, eg. "-f" or "-X",
+ to the Option instances that implement them. If an Option
+        has multiple short option strings, it will appear in this
+ dictionary multiple times. [1]
+ _long_opt : { string : Option }
+ dictionary mapping long option strings, eg. "--file" or
+ "--exclude", to the Option instances that implement them.
+ Again, a given Option can occur multiple times in this
+ dictionary. [1]
+ defaults : { string : any }
+ dictionary mapping option destination names to default
+ values for each destination [1]
+
+ [1] These mappings are common to (shared by) all components of the
+ controlling OptionParser, where they are initially created.
+
+ """
+
+ def __init__(self, option_class, conflict_handler, description):
+ # Initialize the option list and related data structures.
+ # This method must be provided by subclasses, and it must
+ # initialize at least the following instance attributes:
+ # option_list, _short_opt, _long_opt, defaults.
+ self._create_option_list()
+
+ self.option_class = option_class
+ self.set_conflict_handler(conflict_handler)
+ self.set_description(description)
+
+ def _create_option_mappings(self):
+ # For use by OptionParser constructor -- create the master
+ # option mappings used by this OptionParser and all
+ # OptionGroups that it owns.
+ self._short_opt = {} # single letter -> Option instance
+ self._long_opt = {} # long option -> Option instance
+ self.defaults = {} # maps option dest -> default value
+
+
+ def _share_option_mappings(self, parser):
+ # For use by OptionGroup constructor -- use shared option
+ # mappings from the OptionParser that owns this OptionGroup.
+ self._short_opt = parser._short_opt
+ self._long_opt = parser._long_opt
+ self.defaults = parser.defaults
+
+ def set_conflict_handler(self, handler):
+ if handler not in ("error", "resolve"):
+ raise ValueError, "invalid conflict_resolution value %r" % handler
+ self.conflict_handler = handler
+
+ def set_description(self, description):
+ self.description = description
+
+ def get_description(self):
+ return self.description
+
+
+ def destroy(self):
+ """see OptionParser.destroy()."""
+ del self._short_opt
+ del self._long_opt
+ del self.defaults
+
+
+ # -- Option-adding methods -----------------------------------------
+
+ def _check_conflict(self, option):
+ conflict_opts = []
+ for opt in option._short_opts:
+ if self._short_opt.has_key(opt):
+ conflict_opts.append((opt, self._short_opt[opt]))
+ for opt in option._long_opts:
+ if self._long_opt.has_key(opt):
+ conflict_opts.append((opt, self._long_opt[opt]))
+
+ if conflict_opts:
+ handler = self.conflict_handler
+ if handler == "error":
+ raise OptionConflictError(
+ "conflicting option string(s): %s"
+ % string.join(map(lambda co: co[0], conflict_opts), ", "),
+ option)
+ elif handler == "resolve":
+ for (opt, c_option) in conflict_opts:
+ if opt[:2] == "--":
+ c_option._long_opts.remove(opt)
+ del self._long_opt[opt]
+ else:
+ c_option._short_opts.remove(opt)
+ del self._short_opt[opt]
+ if not (c_option._short_opts or c_option._long_opts):
+ c_option.container.option_list.remove(c_option)
+
+ def add_option(self, *args, **kwargs):
+ """add_option(Option)
+ add_option(opt_str, ..., kwarg=val, ...)
+ """
+ if type(args[0]) is types.StringType:
+ option = apply(self.option_class, args, kwargs)
+ elif len(args) == 1 and not kwargs:
+ option = args[0]
+ if not isinstance(option, Option):
+ raise TypeError, "not an Option instance: %r" % option
+ else:
+ raise TypeError, "invalid arguments"
+
+ self._check_conflict(option)
+
+ self.option_list.append(option)
+ option.container = self
+ for opt in option._short_opts:
+ self._short_opt[opt] = option
+ for opt in option._long_opts:
+ self._long_opt[opt] = option
+
+ if option.dest is not None: # option has a dest, we need a default
+ if option.default is not NO_DEFAULT:
+ self.defaults[option.dest] = option.default
+ elif not self.defaults.has_key(option.dest):
+ self.defaults[option.dest] = None
+
+ return option
+
+ def add_options(self, option_list):
+ for option in option_list:
+ self.add_option(option)
+
+ # -- Option query/removal methods ----------------------------------
+
+ def get_option(self, opt_str):
+ return (self._short_opt.get(opt_str) or
+ self._long_opt.get(opt_str))
+
+ def has_option(self, opt_str):
+ return (self._short_opt.has_key(opt_str) or
+ self._long_opt.has_key(opt_str))
+
+ def remove_option(self, opt_str):
+ option = self._short_opt.get(opt_str)
+ if option is None:
+ option = self._long_opt.get(opt_str)
+ if option is None:
+ raise ValueError("no such option %r" % opt_str)
+
+ for opt in option._short_opts:
+ del self._short_opt[opt]
+ for opt in option._long_opts:
+ del self._long_opt[opt]
+ option.container.option_list.remove(option)
+
+
+ # -- Help-formatting methods ---------------------------------------
+
+ def format_option_help(self, formatter):
+ if not self.option_list:
+ return ""
+ result = []
+ for option in self.option_list:
+ if not option.help is SUPPRESS_HELP:
+ result.append(formatter.format_option(option))
+ return string.join(result, "")
+
+ def format_description(self, formatter):
+ return formatter.format_description(self.get_description())
+
+ def format_help(self, formatter):
+ result = []
+ if self.description:
+ result.append(self.format_description(formatter))
+ if self.option_list:
+ result.append(self.format_option_help(formatter))
+ return string.join(result, "\n")
+
+
+class OptionGroup (OptionContainer):
+
+ def __init__(self, parser, title, description=None):
+ self.parser = parser
+ OptionContainer.__init__(
+ self, parser.option_class, parser.conflict_handler, description)
+ self.title = title
+
+ def _create_option_list(self):
+ self.option_list = []
+ self._share_option_mappings(self.parser)
+
+ def set_title(self, title):
+ self.title = title
+
+ def destroy(self):
+ """see OptionParser.destroy()."""
+ OptionContainer.destroy(self)
+ del self.option_list
+
+ # -- Help-formatting methods ---------------------------------------
+
+ def format_help(self, formatter):
+ result = formatter.format_heading(self.title)
+ formatter.indent()
+ result = result + OptionContainer.format_help(self, formatter)
+ formatter.dedent()
+ return result
+
+
+class OptionParser (OptionContainer):
+
+ """
+ Class attributes:
+ standard_option_list : [Option]
+ list of standard options that will be accepted by all instances
+ of this parser class (intended to be overridden by subclasses).
+
+ Instance attributes:
+ usage : string
+ a usage string for your program. Before it is displayed
+ to the user, "%prog" will be expanded to the name of
+ your program (self.prog or os.path.basename(sys.argv[0])).
+ prog : string
+ the name of the current program (to override
+ os.path.basename(sys.argv[0])).
+ epilog : string
+ paragraph of help text to print after option help
+
+ option_groups : [OptionGroup]
+ list of option groups in this parser (option groups are
+ irrelevant for parsing the command-line, but very useful
+ for generating help)
+
+ allow_interspersed_args : bool = true
+ if true, positional arguments may be interspersed with options.
+ Assuming -a and -b each take a single argument, the command-line
+ -ablah foo bar -bboo baz
+ will be interpreted the same as
+ -ablah -bboo -- foo bar baz
+ If this flag were false, that command line would be interpreted as
+ -ablah -- foo bar -bboo baz
+ -- ie. we stop processing options as soon as we see the first
+ non-option argument. (This is the tradition followed by
+ Python's getopt module, Perl's Getopt::Std, and other argument-
+ parsing libraries, but it is generally annoying to users.)
+
+ process_default_values : bool = true
+ if true, option default values are processed similarly to option
+ values from the command line: that is, they are passed to the
+ type-checking function for the option's type (as long as the
+ default value is a string). (This really only matters if you
+ have defined custom types; see SF bug #955889.) Set it to false
+ to restore the behaviour of Optik 1.4.1 and earlier.
+
+ rargs : [string]
+ the argument list currently being parsed. Only set when
+ parse_args() is active, and continually trimmed down as
+ we consume arguments. Mainly there for the benefit of
+ callback options.
+ largs : [string]
+ the list of leftover arguments that we have skipped while
+ parsing options. If allow_interspersed_args is false, this
+ list is always empty.
+ values : Values
+ the set of option values currently being accumulated. Only
+ set when parse_args() is active. Also mainly for callbacks.
+
+ Because of the 'rargs', 'largs', and 'values' attributes,
+ OptionParser is not thread-safe. If, for some perverse reason, you
+ need to parse command-line arguments simultaneously in different
+ threads, use different OptionParser instances.
+
+ """
+
+ standard_option_list = []
+
+ def __init__(self,
+ usage=None,
+ option_list=None,
+ option_class=Option,
+ version=None,
+ conflict_handler="error",
+ description=None,
+ formatter=None,
+ add_help_option=True,
+ prog=None,
+ epilog=None):
+ OptionContainer.__init__(
+ self, option_class, conflict_handler, description)
+ self.set_usage(usage)
+ self.prog = prog
+ self.version = version
+ self.allow_interspersed_args = True
+ self.process_default_values = True
+ if formatter is None:
+ formatter = IndentedHelpFormatter()
+ self.formatter = formatter
+ self.formatter.set_parser(self)
+ self.epilog = epilog
+
+ # Populate the option list; initial sources are the
+ # standard_option_list class attribute, the 'option_list'
+ # argument, and (if applicable) the _add_version_option() and
+ # _add_help_option() methods.
+ self._populate_option_list(option_list,
+ add_help=add_help_option)
+
+ self._init_parsing_state()
+
+
+ def destroy(self):
+ """
+ Declare that you are done with this OptionParser. This cleans up
+ reference cycles so the OptionParser (and all objects referenced by
+ it) can be garbage-collected promptly. After calling destroy(), the
+ OptionParser is unusable.
+ """
+ OptionContainer.destroy(self)
+ for group in self.option_groups:
+ group.destroy()
+ del self.option_list
+ del self.option_groups
+ del self.formatter
+
+
+ # -- Private methods -----------------------------------------------
+ # (used by our or OptionContainer's constructor)
+
+ def _create_option_list(self):
+ self.option_list = []
+ self.option_groups = []
+ self._create_option_mappings()
+
+ def _add_help_option(self):
+ self.add_option("-h", "--help",
+ action="help",
+ help=_("show this help message and exit"))
+
+ def _add_version_option(self):
+ self.add_option("--version",
+ action="version",
+ help=_("show program's version number and exit"))
+
+ def _populate_option_list(self, option_list, add_help=True):
+ if self.standard_option_list:
+ self.add_options(self.standard_option_list)
+ if option_list:
+ self.add_options(option_list)
+ if self.version:
+ self._add_version_option()
+ if add_help:
+ self._add_help_option()
+
+ def _init_parsing_state(self):
+ # These are set in parse_args() for the convenience of callbacks.
+ self.rargs = None
+ self.largs = None
+ self.values = None
+
+
+ # -- Simple modifier methods ---------------------------------------
+
+ def set_usage(self, usage):
+ if usage is None:
+ self.usage = _("%prog [options]")
+ elif usage is SUPPRESS_USAGE:
+ self.usage = None
+ # For backwards compatibility with Optik 1.3 and earlier.
+ elif string.lower(usage)[:7] == "usage: ":
+ self.usage = usage[7:]
+ else:
+ self.usage = usage
+
+ def enable_interspersed_args(self):
+ self.allow_interspersed_args = True
+
+ def disable_interspersed_args(self):
+ self.allow_interspersed_args = False
+
+ def set_process_default_values(self, process):
+ self.process_default_values = process
+
+ def set_default(self, dest, value):
+ self.defaults[dest] = value
+
+ def set_defaults(self, **kwargs):
+ self.defaults.update(kwargs)
+
+ def _get_all_options(self):
+ options = self.option_list[:]
+ for group in self.option_groups:
+ options.extend(group.option_list)
+ return options
+
+ def get_default_values(self):
+ if not self.process_default_values:
+ # Old, pre-Optik 1.5 behaviour.
+ return Values(self.defaults)
+
+ defaults = self.defaults.copy()
+ for option in self._get_all_options():
+ default = defaults.get(option.dest)
+ if isbasestring(default):
+ opt_str = option.get_opt_string()
+ defaults[option.dest] = option.check_value(opt_str, default)
+
+ return Values(defaults)
+
+
+ # -- OptionGroup methods -------------------------------------------
+
+ def add_option_group(self, *args, **kwargs):
+ # XXX lots of overlap with OptionContainer.add_option()
+ if type(args[0]) is types.StringType:
+ group = apply(OptionGroup, (self,) + args, kwargs)
+ elif len(args) == 1 and not kwargs:
+ group = args[0]
+ if not isinstance(group, OptionGroup):
+ raise TypeError, "not an OptionGroup instance: %r" % group
+ if group.parser is not self:
+ raise ValueError, "invalid OptionGroup (wrong parser)"
+ else:
+ raise TypeError, "invalid arguments"
+
+ self.option_groups.append(group)
+ return group
+
+ def get_option_group(self, opt_str):
+ option = (self._short_opt.get(opt_str) or
+ self._long_opt.get(opt_str))
+ if option and option.container is not self:
+ return option.container
+ return None
+
+
+ # -- Option-parsing methods ----------------------------------------
+
+ def _get_args(self, args):
+ if args is None:
+ return sys.argv[1:]
+ else:
+ return args[:] # don't modify caller's list
+
+ def parse_args(self, args=None, values=None):
+ """
+ parse_args(args : [string] = sys.argv[1:],
+ values : Values = None)
+ -> (values : Values, args : [string])
+
+ Parse the command-line options found in 'args' (default:
+ sys.argv[1:]). Any errors result in a call to 'error()', which
+ by default prints the usage message to stderr and calls
+ sys.exit() with an error message. On success returns a pair
+        (values, args) where 'values' is a Values instance (with all
+ your option values) and 'args' is the list of arguments left
+ over after parsing options.
+ """
+ rargs = self._get_args(args)
+ if values is None:
+ values = self.get_default_values()
+
+ # Store the halves of the argument list as attributes for the
+ # convenience of callbacks:
+ # rargs
+ # the rest of the command-line (the "r" stands for
+ # "remaining" or "right-hand")
+ # largs
+ # the leftover arguments -- ie. what's left after removing
+ # options and their arguments (the "l" stands for "leftover"
+ # or "left-hand")
+ self.rargs = rargs
+ self.largs = largs = []
+ self.values = values
+
+ try:
+ stop = self._process_args(largs, rargs, values)
+ except (BadOptionError, OptionValueError), err:
+ self.error(str(err))
+
+ args = largs + rargs
+ return self.check_values(values, args)
+
+ def check_values(self, values, args):
+ """
+ check_values(values : Values, args : [string])
+ -> (values : Values, args : [string])
+
+ Check that the supplied option values and leftover arguments are
+ valid. Returns the option values and leftover arguments
+ (possibly adjusted, possibly completely new -- whatever you
+ like). Default implementation just returns the passed-in
+ values; subclasses may override as desired.
+ """
+ return (values, args)
+
+ def _process_args(self, largs, rargs, values):
+ """_process_args(largs : [string],
+ rargs : [string],
+ values : Values)
+
+ Process command-line arguments and populate 'values', consuming
+ options and arguments from 'rargs'. If 'allow_interspersed_args' is
+ false, stop at the first non-option argument. If true, accumulate any
+ interspersed non-option arguments in 'largs'.
+ """
+ while rargs:
+ arg = rargs[0]
+ # We handle bare "--" explicitly, and bare "-" is handled by the
+ # standard arg handler since the short arg case ensures that the
+ # len of the opt string is greater than 1.
+ if arg == "--":
+ del rargs[0]
+ return
+ elif arg[0:2] == "--":
+ # process a single long option (possibly with value(s))
+ self._process_long_opt(rargs, values)
+ elif arg[:1] == "-" and len(arg) > 1:
+ # process a cluster of short options (possibly with
+ # value(s) for the last one only)
+ self._process_short_opts(rargs, values)
+ elif self.allow_interspersed_args:
+ largs.append(arg)
+ del rargs[0]
+ else:
+ return # stop now, leave this arg in rargs
+
+ # Say this is the original argument list:
+ # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
+ # ^
+ # (we are about to process arg(i)).
+ #
+ # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
+ # [arg0, ..., arg(i-1)] (any options and their arguments will have
+ # been removed from largs).
+ #
+ # The while loop will usually consume 1 or more arguments per pass.
+ # If it consumes 1 (eg. arg is an option that takes no arguments),
+ # then after _process_arg() is done the situation is:
+ #
+ # largs = subset of [arg0, ..., arg(i)]
+ # rargs = [arg(i+1), ..., arg(N-1)]
+ #
+ # If allow_interspersed_args is false, largs will always be
+ # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
+ # not a very interesting subset!
+
+ def _match_long_opt(self, opt):
+ """_match_long_opt(opt : string) -> string
+
+ Determine which long option string 'opt' matches, ie. which one
+        it is an unambiguous abbreviation for.  Raises BadOptionError if
+ 'opt' doesn't unambiguously match any long option string.
+ """
+ return _match_abbrev(opt, self._long_opt)
+
+ def _process_long_opt(self, rargs, values):
+ arg = rargs.pop(0)
+
+ # Value explicitly attached to arg? Pretend it's the next
+ # argument.
+ if "=" in arg:
+ (opt, next_arg) = string.split(arg, "=", 1)
+ rargs.insert(0, next_arg)
+ had_explicit_value = True
+ else:
+ opt = arg
+ had_explicit_value = False
+
+ opt = self._match_long_opt(opt)
+ option = self._long_opt[opt]
+ if option.takes_value():
+ nargs = option.nargs
+ if len(rargs) < nargs:
+ if nargs == 1:
+ self.error(_("%s option requires an argument") % opt)
+ else:
+ self.error(_("%s option requires %d arguments")
+ % (opt, nargs))
+ elif nargs == 1:
+ value = rargs.pop(0)
+ else:
+ value = tuple(rargs[0:nargs])
+ del rargs[0:nargs]
+
+ elif had_explicit_value:
+ self.error(_("%s option does not take a value") % opt)
+
+ else:
+ value = None
+
+ option.process(opt, value, values, self)
+
+ def _process_short_opts(self, rargs, values):
+ arg = rargs.pop(0)
+ stop = False
+ i = 1
+ for ch in arg[1:]:
+ opt = "-" + ch
+ option = self._short_opt.get(opt)
+ i = i + 1 # we have consumed a character
+
+ if not option:
+ raise BadOptionError(opt)
+ if option.takes_value():
+ # Any characters left in arg? Pretend they're the
+ # next arg, and stop consuming characters of arg.
+ if i < len(arg):
+ rargs.insert(0, arg[i:])
+ stop = True
+
+ nargs = option.nargs
+ if len(rargs) < nargs:
+ if nargs == 1:
+ self.error(_("%s option requires an argument") % opt)
+ else:
+ self.error(_("%s option requires %d arguments")
+ % (opt, nargs))
+ elif nargs == 1:
+ value = rargs.pop(0)
+ else:
+ value = tuple(rargs[0:nargs])
+ del rargs[0:nargs]
+
+ else: # option doesn't take a value
+ value = None
+
+ option.process(opt, value, values, self)
+
+ if stop:
+ break
+
+
+ # -- Feedback methods ----------------------------------------------
+
+ def get_prog_name(self):
+ if self.prog is None:
+ return os.path.basename(sys.argv[0])
+ else:
+ return self.prog
+
+ def expand_prog_name(self, s):
+ return string.replace(s, "%prog", self.get_prog_name())
+
+ def get_description(self):
+ return self.expand_prog_name(self.description)
+
+ def exit(self, status=0, msg=None):
+ if msg:
+ sys.stderr.write(msg)
+ sys.exit(status)
+
+ def error(self, msg):
+ """error(msg : string)
+
+ Print a usage message incorporating 'msg' to stderr and exit.
+ If you override this in a subclass, it should not return -- it
+ should either exit or raise an exception.
+ """
+ self.print_usage(sys.stderr)
+ self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg))
+
+ def get_usage(self):
+ if self.usage:
+ return self.formatter.format_usage(
+ self.expand_prog_name(self.usage))
+ else:
+ return ""
+
+ def print_usage(self, file=None):
+ """print_usage(file : file = stdout)
+
+ Print the usage message for the current program (self.usage) to
+        'file' (default stdout).  Any occurrence of the string "%prog" in
+ self.usage is replaced with the name of the current program
+ (basename of sys.argv[0]). Does nothing if self.usage is empty
+ or not defined.
+ """
+ if self.usage:
+ file.write(self.get_usage() + '\n')
+
+ def get_version(self):
+ if self.version:
+ return self.expand_prog_name(self.version)
+ else:
+ return ""
+
+ def print_version(self, file=None):
+ """print_version(file : file = stdout)
+
+ Print the version message for this program (self.version) to
+        'file' (default stdout).  As with print_usage(), any occurrence
+ of "%prog" in self.version is replaced by the current program's
+ name. Does nothing if self.version is empty or undefined.
+ """
+ if self.version:
+ file.write(self.get_version() + '\n')
+
+ def format_option_help(self, formatter=None):
+ if formatter is None:
+ formatter = self.formatter
+ formatter.store_option_strings(self)
+ result = []
+ result.append(formatter.format_heading(_("Options")))
+ formatter.indent()
+ if self.option_list:
+ result.append(OptionContainer.format_option_help(self, formatter))
+ result.append("\n")
+ for group in self.option_groups:
+ result.append(group.format_help(formatter))
+ result.append("\n")
+ formatter.dedent()
+ # Drop the last "\n", or the header if no options or option groups:
+ return string.join(result[:-1], "")
+
+ def format_epilog(self, formatter):
+ return formatter.format_epilog(self.epilog)
+
+ def format_help(self, formatter=None):
+ if formatter is None:
+ formatter = self.formatter
+ result = []
+ if self.usage:
+ result.append(self.get_usage() + "\n")
+ if self.description:
+ result.append(self.format_description(formatter) + "\n")
+ result.append(self.format_option_help(formatter))
+ result.append(self.format_epilog(formatter))
+ return string.join(result, "")
+
+ # used by test suite
+ def _get_encoding(self, file):
+ encoding = getattr(file, "encoding", None)
+ if not encoding:
+ encoding = sys.getdefaultencoding()
+ return encoding
+
+ def print_help(self, file=None):
+ """print_help(file : file = stdout)
+
+ Print an extended help message, listing all options and any
+ help text provided with them, to 'file' (default stdout).
+ """
+ if file is None:
+ file = sys.stdout
+ encoding = self._get_encoding(file)
+ file.write(encode_wrapper(self.format_help(), encoding, "replace"))
+
+# class OptionParser
+
+
+def _match_abbrev(s, wordmap):
+ """_match_abbrev(s : string, wordmap : {string : Option}) -> string
+
+ Return the string key in 'wordmap' for which 's' is an unambiguous
+ abbreviation. If 's' is found to be ambiguous or doesn't match any of
+    'wordmap', raise BadOptionError.
+ """
+ # Is there an exact match?
+ if wordmap.has_key(s):
+ return s
+ else:
+ # Isolate all words with s as a prefix.
+ possibilities = filter(lambda w, s=s: w[:len(s)] == s, wordmap.keys())
+ # No exact match, so there had better be just one possibility.
+ if len(possibilities) == 1:
+ return possibilities[0]
+ elif not possibilities:
+ raise BadOptionError(s)
+ else:
+ # More than one possible completion: ambiguous prefix.
+ possibilities.sort()
+ raise AmbiguousOptionError(s, possibilities)
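+
+# Illustrative worked example, not part of the upstream code: with a wordmap
+# whose keys are "--verbose" and "--version",
+#     _match_abbrev("--verb", wordmap)    returns "--verbose"
+#     _match_abbrev("--ver", wordmap)     raises AmbiguousOptionError
+#     _match_abbrev("--colour", wordmap)  raises BadOptionError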
+
+
+# Some day, there might be many Option classes. As of Optik 1.3, the
+# preferred way to instantiate Options is indirectly, via make_option(),
+# which will become a factory function when there are many Option
+# classes.
+make_option = Option
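+
+# Illustrative sketch, not part of the upstream code: building a parser with
+# make_option() and parsing an explicit argument list.  The option names,
+# destinations and the helper below are hypothetical and never called.
+def _example_parser(argv):
+    parser = OptionParser(usage="%prog [options] file...")
+    parser.add_options([
+        make_option("-v", "--verbose", action="store_true", dest="verbose",
+                    default=False, help="print extra status messages"),
+        make_option("-o", "--output", type="string", dest="output",
+                    metavar="FILE", help="write results to FILE"),
+    ])
+    # Long options may be abbreviated when the prefix is unambiguous,
+    # e.g. "--verb" resolves to "--verbose" via _match_abbrev().
+    (options, args) = parser.parse_args(argv)
+    return options, args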
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_sets.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_sets.py
new file mode 100644
index 000000000..32a0dd64f
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_sets.py
@@ -0,0 +1,577 @@
+"""Classes to represent arbitrary sets (including sets of sets).
+
+This module implements sets using dictionaries whose values are
+ignored. The usual operations (union, intersection, deletion, etc.)
+are provided as both methods and operators.
+
+Important: sets are not sequences! While they support 'x in s',
+'len(s)', and 'for x in s', none of those operations are unique for
+sequences; for example, mappings support all three as well. The
+characteristic operation for sequences is subscripting with small
+integers: s[i], for i in range(len(s)). Sets don't support
+subscripting at all. Also, sequences allow multiple occurrences and
+their elements have a definite order; sets on the other hand don't
+record multiple occurrences and don't remember the order of element
+insertion (which is why they don't support s[i]).
+
+The following classes are provided:
+
+BaseSet -- All the operations common to both mutable and immutable
+ sets. This is an abstract class, not meant to be directly
+ instantiated.
+
+Set -- Mutable sets, subclass of BaseSet; not hashable.
+
+ImmutableSet -- Immutable sets, subclass of BaseSet; hashable.
+ An iterable argument is mandatory to create an ImmutableSet.
+
+_TemporarilyImmutableSet -- A wrapper around a Set, hashable,
+ giving the same hash value as the immutable set equivalent
+ would have. Do not use this class directly.
+
+Only hashable objects can be added to a Set. In particular, you cannot
+really add a Set as an element to another Set; if you try, what is
+actually added is an ImmutableSet built from it (it compares equal to
+the one you tried adding).
+
+When you ask if `x in y' where x is a Set and y is a Set or
+ImmutableSet, x is wrapped into a _TemporarilyImmutableSet z, and
+what's tested is actually `z in y'.
+
+"""
+
+# Code history:
+#
+# - Greg V. Wilson wrote the first version, using a different approach
+# to the mutable/immutable problem, and inheriting from dict.
+#
+# - Alex Martelli modified Greg's version to implement the current
+# Set/ImmutableSet approach, and make the data an attribute.
+#
+# - Guido van Rossum rewrote much of the code, made some API changes,
+# and cleaned up the docstrings.
+#
+# - Raymond Hettinger added a number of speedups and other
+# improvements.
+
+from __future__ import generators
+try:
+ from itertools import ifilter, ifilterfalse
+except ImportError:
+ # Code to make the module run under Py2.2
+ def ifilter(predicate, iterable):
+ if predicate is None:
+ def predicate(x):
+ return x
+ for x in iterable:
+ if predicate(x):
+ yield x
+ def ifilterfalse(predicate, iterable):
+ if predicate is None:
+ def predicate(x):
+ return x
+ for x in iterable:
+ if not predicate(x):
+ yield x
+ try:
+ True, False
+ except NameError:
+ True, False = (0==0, 0!=0)
+
+__all__ = ['BaseSet', 'Set', 'ImmutableSet']
+
+class BaseSet(object):
+ """Common base class for mutable and immutable sets."""
+
+ __slots__ = ['_data']
+
+ # Constructor
+
+ def __init__(self):
+ """This is an abstract class."""
+ # Don't call this from a concrete subclass!
+ if self.__class__ is BaseSet:
+ raise TypeError, ("BaseSet is an abstract class. "
+ "Use Set or ImmutableSet.")
+
+ # Standard protocols: __len__, __repr__, __str__, __iter__
+
+ def __len__(self):
+ """Return the number of elements of a set."""
+ return len(self._data)
+
+ def __repr__(self):
+ """Return string representation of a set.
+
+ This looks like 'Set([<list of elements>])'.
+ """
+ return self._repr()
+
+ # __str__ is the same as __repr__
+ __str__ = __repr__
+
+ def _repr(self, sorted=False):
+ elements = self._data.keys()
+ if sorted:
+ elements.sort()
+ return '%s(%r)' % (self.__class__.__name__, elements)
+
+ def __iter__(self):
+        """Return an iterator over the elements of a set.
+
+ This is the keys iterator for the underlying dict.
+ """
+ return self._data.iterkeys()
+
+ # Three-way comparison is not supported. However, because __eq__ is
+ # tried before __cmp__, if Set x == Set y, x.__eq__(y) returns True and
+ # then cmp(x, y) returns 0 (Python doesn't actually call __cmp__ in this
+ # case).
+
+ def __cmp__(self, other):
+ raise TypeError, "can't compare sets using cmp()"
+
+ # Equality comparisons using the underlying dicts. Mixed-type comparisons
+ # are allowed here, where Set == z for non-Set z always returns False,
+ # and Set != z always True. This allows expressions like "x in y" to
+ # give the expected result when y is a sequence of mixed types, not
+ # raising a pointless TypeError just because y contains a Set, or x is
+    # a Set and y contains a non-set ("in" invokes only __eq__).
+ # Subtle: it would be nicer if __eq__ and __ne__ could return
+ # NotImplemented instead of True or False. Then the other comparand
+ # would get a chance to determine the result, and if the other comparand
+ # also returned NotImplemented then it would fall back to object address
+ # comparison (which would always return False for __eq__ and always
+ # True for __ne__). However, that doesn't work, because this type
+ # *also* implements __cmp__: if, e.g., __eq__ returns NotImplemented,
+ # Python tries __cmp__ next, and the __cmp__ here then raises TypeError.
+
+ def __eq__(self, other):
+ if isinstance(other, BaseSet):
+ return self._data == other._data
+ else:
+ return False
+
+ def __ne__(self, other):
+ if isinstance(other, BaseSet):
+ return self._data != other._data
+ else:
+ return True
+
+ # Copying operations
+
+ def copy(self):
+ """Return a shallow copy of a set."""
+ result = self.__class__()
+ result._data.update(self._data)
+ return result
+
+ __copy__ = copy # For the copy module
+
+ def __deepcopy__(self, memo):
+ """Return a deep copy of a set; used by copy module."""
+ # This pre-creates the result and inserts it in the memo
+ # early, in case the deep copy recurses into another reference
+ # to this same set. A set can't be an element of itself, but
+ # it can certainly contain an object that has a reference to
+ # itself.
+ from copy import deepcopy
+ result = self.__class__()
+ memo[id(self)] = result
+ data = result._data
+ value = True
+ for elt in self:
+ data[deepcopy(elt, memo)] = value
+ return result
+
+ # Standard set operations: union, intersection, both differences.
+ # Each has an operator version (e.g. __or__, invoked with |) and a
+ # method version (e.g. union).
+ # Subtle: Each pair requires distinct code so that the outcome is
+ # correct when the type of other isn't suitable. For example, if
+ # we did "union = __or__" instead, then Set().union(3) would return
+ # NotImplemented instead of raising TypeError (albeit that *why* it
+ # raises TypeError as-is is also a bit subtle).
+
+ def __or__(self, other):
+ """Return the union of two sets as a new set.
+
+ (I.e. all elements that are in either set.)
+ """
+ if not isinstance(other, BaseSet):
+ return NotImplemented
+ return self.union(other)
+
+ def union(self, other):
+ """Return the union of two sets as a new set.
+
+ (I.e. all elements that are in either set.)
+ """
+ result = self.__class__(self)
+ result._update(other)
+ return result
+
+ def __and__(self, other):
+ """Return the intersection of two sets as a new set.
+
+ (I.e. all elements that are in both sets.)
+ """
+ if not isinstance(other, BaseSet):
+ return NotImplemented
+ return self.intersection(other)
+
+ def intersection(self, other):
+ """Return the intersection of two sets as a new set.
+
+ (I.e. all elements that are in both sets.)
+ """
+ if not isinstance(other, BaseSet):
+ other = Set(other)
+ if len(self) <= len(other):
+ little, big = self, other
+ else:
+ little, big = other, self
+ common = ifilter(big._data.has_key, little)
+ return self.__class__(common)
+
+ def __xor__(self, other):
+ """Return the symmetric difference of two sets as a new set.
+
+ (I.e. all elements that are in exactly one of the sets.)
+ """
+ if not isinstance(other, BaseSet):
+ return NotImplemented
+ return self.symmetric_difference(other)
+
+ def symmetric_difference(self, other):
+ """Return the symmetric difference of two sets as a new set.
+
+ (I.e. all elements that are in exactly one of the sets.)
+ """
+ result = self.__class__()
+ data = result._data
+ value = True
+ selfdata = self._data
+ try:
+ otherdata = other._data
+ except AttributeError:
+ otherdata = Set(other)._data
+ for elt in ifilterfalse(otherdata.has_key, selfdata):
+ data[elt] = value
+ for elt in ifilterfalse(selfdata.has_key, otherdata):
+ data[elt] = value
+ return result
+
+ def __sub__(self, other):
+ """Return the difference of two sets as a new Set.
+
+ (I.e. all elements that are in this set and not in the other.)
+ """
+ if not isinstance(other, BaseSet):
+ return NotImplemented
+ return self.difference(other)
+
+ def difference(self, other):
+ """Return the difference of two sets as a new Set.
+
+ (I.e. all elements that are in this set and not in the other.)
+ """
+ result = self.__class__()
+ data = result._data
+ try:
+ otherdata = other._data
+ except AttributeError:
+ otherdata = Set(other)._data
+ value = True
+ for elt in ifilterfalse(otherdata.has_key, self):
+ data[elt] = value
+ return result
+
+ # Membership test
+
+ def __contains__(self, element):
+ """Report whether an element is a member of a set.
+
+ (Called in response to the expression `element in self'.)
+ """
+ try:
+ return element in self._data
+ except TypeError:
+ transform = getattr(element, "__as_temporarily_immutable__", None)
+ if transform is None:
+ raise # re-raise the TypeError exception we caught
+ return transform() in self._data
+
+ # Subset and superset test
+
+ def issubset(self, other):
+ """Report whether another set contains this set."""
+ self._binary_sanity_check(other)
+ if len(self) > len(other): # Fast check for obvious cases
+ return False
+ for elt in ifilterfalse(other._data.has_key, self):
+ return False
+ return True
+
+ def issuperset(self, other):
+ """Report whether this set contains another set."""
+ self._binary_sanity_check(other)
+ if len(self) < len(other): # Fast check for obvious cases
+ return False
+ for elt in ifilterfalse(self._data.has_key, other):
+ return False
+ return True
+
+ # Inequality comparisons using the is-subset relation.
+ __le__ = issubset
+ __ge__ = issuperset
+
+ def __lt__(self, other):
+ self._binary_sanity_check(other)
+ return len(self) < len(other) and self.issubset(other)
+
+ def __gt__(self, other):
+ self._binary_sanity_check(other)
+ return len(self) > len(other) and self.issuperset(other)
+
+ # Assorted helpers
+
+ def _binary_sanity_check(self, other):
+ # Check that the other argument to a binary operation is also
+ # a set, raising a TypeError otherwise.
+ if not isinstance(other, BaseSet):
+ raise TypeError, "Binary operation only permitted between sets"
+
+ def _compute_hash(self):
+ # Calculate hash code for a set by xor'ing the hash codes of
+ # the elements. This ensures that the hash code does not depend
+ # on the order in which elements are added to the set. This is
+ # not called __hash__ because a BaseSet should not be hashable;
+ # only an ImmutableSet is hashable.
+ result = 0
+ for elt in self:
+ result ^= hash(elt)
+ return result
+
+ def _update(self, iterable):
+ # The main loop for update() and the subclass __init__() methods.
+ data = self._data
+
+ # Use the fast update() method when a dictionary is available.
+ if isinstance(iterable, BaseSet):
+ data.update(iterable._data)
+ return
+
+ value = True
+
+ if type(iterable) in (list, tuple, xrange):
+ # Optimized: we know that __iter__() and next() can't
+ # raise TypeError, so we can move 'try:' out of the loop.
+ it = iter(iterable)
+ while True:
+ try:
+ for element in it:
+ data[element] = value
+ return
+ except TypeError:
+ transform = getattr(element, "__as_immutable__", None)
+ if transform is None:
+ raise # re-raise the TypeError exception we caught
+ data[transform()] = value
+ else:
+ # Safe: only catch TypeError where intended
+ for element in iterable:
+ try:
+ data[element] = value
+ except TypeError:
+ transform = getattr(element, "__as_immutable__", None)
+ if transform is None:
+ raise # re-raise the TypeError exception we caught
+ data[transform()] = value
+
+
+class ImmutableSet(BaseSet):
+ """Immutable set class."""
+
+ __slots__ = ['_hashcode']
+
+ # BaseSet + hashing
+
+ def __init__(self, iterable=None):
+ """Construct an immutable set from an optional iterable."""
+ self._hashcode = None
+ self._data = {}
+ if iterable is not None:
+ self._update(iterable)
+
+ def __hash__(self):
+ if self._hashcode is None:
+ self._hashcode = self._compute_hash()
+ return self._hashcode
+
+ def __getstate__(self):
+ return self._data, self._hashcode
+
+ def __setstate__(self, state):
+ self._data, self._hashcode = state
+
+class Set(BaseSet):
+ """ Mutable set class."""
+
+ __slots__ = []
+
+ # BaseSet + operations requiring mutability; no hashing
+
+ def __init__(self, iterable=None):
+ """Construct a set from an optional iterable."""
+ self._data = {}
+ if iterable is not None:
+ self._update(iterable)
+
+ def __getstate__(self):
+        # Return the instance data as a one-tuple, for pickling and copying.
+ return self._data,
+
+ def __setstate__(self, data):
+ self._data, = data
+
+ def __hash__(self):
+ """A Set cannot be hashed."""
+ # We inherit object.__hash__, so we must deny this explicitly
+ raise TypeError, "Can't hash a Set, only an ImmutableSet."
+
+ # In-place union, intersection, differences.
+ # Subtle: The xyz_update() functions deliberately return None,
+ # as do all mutating operations on built-in container types.
+ # The __xyz__ spellings have to return self, though.
+
+ def __ior__(self, other):
+ """Update a set with the union of itself and another."""
+ self._binary_sanity_check(other)
+ self._data.update(other._data)
+ return self
+
+ def union_update(self, other):
+ """Update a set with the union of itself and another."""
+ self._update(other)
+
+ def __iand__(self, other):
+ """Update a set with the intersection of itself and another."""
+ self._binary_sanity_check(other)
+ self._data = (self & other)._data
+ return self
+
+ def intersection_update(self, other):
+ """Update a set with the intersection of itself and another."""
+ if isinstance(other, BaseSet):
+ self &= other
+ else:
+ self._data = (self.intersection(other))._data
+
+ def __ixor__(self, other):
+ """Update a set with the symmetric difference of itself and another."""
+ self._binary_sanity_check(other)
+ self.symmetric_difference_update(other)
+ return self
+
+ def symmetric_difference_update(self, other):
+ """Update a set with the symmetric difference of itself and another."""
+ data = self._data
+ value = True
+ if not isinstance(other, BaseSet):
+ other = Set(other)
+ if self is other:
+ self.clear()
+ for elt in other:
+ if elt in data:
+ del data[elt]
+ else:
+ data[elt] = value
+
+ def __isub__(self, other):
+ """Remove all elements of another set from this set."""
+ self._binary_sanity_check(other)
+ self.difference_update(other)
+ return self
+
+ def difference_update(self, other):
+ """Remove all elements of another set from this set."""
+ data = self._data
+ if not isinstance(other, BaseSet):
+ other = Set(other)
+ if self is other:
+ self.clear()
+ for elt in ifilter(data.has_key, other):
+ del data[elt]
+
+ # Python dict-like mass mutations: update, clear
+
+ def update(self, iterable):
+ """Add all values from an iterable (such as a list or file)."""
+ self._update(iterable)
+
+ def clear(self):
+ """Remove all elements from this set."""
+ self._data.clear()
+
+ # Single-element mutations: add, remove, discard
+
+ def add(self, element):
+ """Add an element to a set.
+
+ This has no effect if the element is already present.
+ """
+ try:
+ self._data[element] = True
+ except TypeError:
+ transform = getattr(element, "__as_immutable__", None)
+ if transform is None:
+ raise # re-raise the TypeError exception we caught
+ self._data[transform()] = True
+
+ def remove(self, element):
+ """Remove an element from a set; it must be a member.
+
+ If the element is not a member, raise a KeyError.
+ """
+ try:
+ del self._data[element]
+ except TypeError:
+ transform = getattr(element, "__as_temporarily_immutable__", None)
+ if transform is None:
+ raise # re-raise the TypeError exception we caught
+ del self._data[transform()]
+
+ def discard(self, element):
+ """Remove an element from a set if it is a member.
+
+ If the element is not a member, do nothing.
+ """
+ try:
+ self.remove(element)
+ except KeyError:
+ pass
+
+ def pop(self):
+ """Remove and return an arbitrary set element."""
+ return self._data.popitem()[0]
+
+ def __as_immutable__(self):
+ # Return a copy of self as an immutable set
+ return ImmutableSet(self)
+
+ def __as_temporarily_immutable__(self):
+ # Return self wrapped in a temporarily immutable set
+ return _TemporarilyImmutableSet(self)
+
+
+class _TemporarilyImmutableSet(BaseSet):
+ # Wrap a mutable set as if it was temporarily immutable.
+ # This only supplies hashing and equality comparisons.
+
+ def __init__(self, set):
+ self._set = set
+ self._data = set._data # Needed by ImmutableSet.__eq__()
+
+ def __hash__(self):
+ return self._set._compute_hash()
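+
+
+# Illustrative sketch, not part of the upstream module: the in-place
+# operations on Set mutate the receiver.  union_update() returns None (like
+# list.sort()), while the operator form |= rebinds the same, mutated set.
+# The helper below is hypothetical and never called.
+def _example_inplace_ops():
+    s = Set([1, 2])
+    s.union_update([3])    # s now contains 1, 2, 3; the call returns None
+    s |= Set([4])          # operator form; s now contains 1, 2, 3, 4
+    s.discard(99)          # removing a missing element is a no-op
+    return s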
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_sets15.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_sets15.py
new file mode 100644
index 000000000..1fe5a4f77
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_sets15.py
@@ -0,0 +1,170 @@
+#
+# A Set class that works all the way back to Python 1.5. From:
+#
+# Python Cookbook: Yet another Set class for Python
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/106469
+# Goncalo Rodriques
+#
+# This is a pure Pythonic implementation of a set class. The syntax
+# and methods implemented are, for the most part, borrowed from
+# PEP 218 by Greg Wilson.
+#
+# Note that this class violates the formal definition of a set() by adding
+# a __getitem__() method so we can iterate over a set's elements under
+# Python 1.5 and 2.1, which don't support __iter__() and iterator types.
+#
+
+import string
+
+class Set:
+ """The set class. It can contain mutable objects."""
+
+ def __init__(self, seq = None):
+ """The constructor. It can take any object giving an iterator as an optional
+ argument to populate the new set."""
+ self.elems = []
+ if seq:
+ for elem in seq:
+ if elem not in self.elems:
+ hash(elem)
+ self.elems.append(elem)
+
+ def __str__(self):
+ return "set([%s])" % string.join(map(str, self.elems), ", ")
+
+
+ def copy(self):
+ """Shallow copy of a set object."""
+ return Set(self.elems)
+
+ def __contains__(self, elem):
+ return elem in self.elems
+
+ def __len__(self):
+ return len(self.elems)
+
+ def __getitem__(self, index):
+ # Added so that Python 1.5 can iterate over the elements.
+ # The cookbook recipe's author didn't like this because there
+ # really isn't any order in a set object, but this is necessary
+ # to make the class work well enough for our purposes.
+ return self.elems[index]
+
+ def items(self):
+ """Returns a list of the elements in the set."""
+ return self.elems
+
+ def add(self, elem):
+ """Add one element to the set."""
+ if elem not in self.elems:
+ hash(elem)
+ self.elems.append(elem)
+
+ def remove(self, elem):
+        """Remove an element from the set.  Raise a LookupError if elem is not in the set."""
+ try:
+ self.elems.remove(elem)
+ except ValueError:
+ raise LookupError, "Object %s is not a member of the set." % str(elem)
+
+ def discard(self, elem):
+ """Remove an element from the set. Do nothing if elem is not in the set."""
+ try:
+ self.elems.remove(elem)
+ except ValueError:
+ pass
+
+ def sort(self, func=cmp):
+ self.elems.sort(func)
+
+ #Define an iterator for a set.
+ def __iter__(self):
+ return iter(self.elems)
+
+ #The basic binary operations with sets.
+ def __or__(self, other):
+ """Union of two sets."""
+ ret = self.copy()
+ for elem in other.elems:
+ if elem not in ret:
+ ret.elems.append(elem)
+ return ret
+
+ def __sub__(self, other):
+ """Difference of two sets."""
+ ret = self.copy()
+ for elem in other.elems:
+ ret.discard(elem)
+ return ret
+
+ def __and__(self, other):
+ """Intersection of two sets."""
+ ret = Set()
+ for elem in self.elems:
+ if elem in other.elems:
+ ret.elems.append(elem)
+ return ret
+
+ def __add__(self, other):
+ """Symmetric difference of two sets."""
+ ret = Set()
+ temp = other.copy()
+ for elem in self.elems:
+ if elem in temp.elems:
+ temp.elems.remove(elem)
+ else:
+ ret.elems.append(elem)
+ #Add remaining elements.
+ for elem in temp.elems:
+ ret.elems.append(elem)
+ return ret
+
+ def __mul__(self, other):
+ """Cartesian product of two sets."""
+ ret = Set()
+ for elemself in self.elems:
+ x = map(lambda other, s=elemself: (s, other), other.elems)
+ ret.elems.extend(x)
+ return ret
+
+ #Some of the binary comparisons.
+ def __lt__(self, other):
+ """Returns 1 if the lhs set is contained but not equal to the rhs set."""
+ if len(self.elems) < len(other.elems):
+ temp = other.copy()
+ for elem in self.elems:
+ if elem in temp.elems:
+ temp.remove(elem)
+ else:
+ return 0
+            return len(temp.elems) > 0
+ else:
+ return 0
+
+ def __le__(self, other):
+ """Returns 1 if the lhs set is contained in the rhs set."""
+ if len(self.elems) <= len(other.elems):
+ ret = 1
+ for elem in self.elems:
+ if elem not in other.elems:
+ ret = 0
+ break
+ return ret
+ else:
+ return 0
+
+ def __eq__(self, other):
+ """Returns 1 if the sets are equal."""
+ if len(self.elems) != len(other.elems):
+ return 0
+ else:
+ return len(self - other) == 0
+
+ def __cmp__(self, other):
+        """Return -1 if self < other, 1 if other < self, and 0 otherwise."""
+ if self.__lt__(other):
+ return -1
+ elif other.__lt__(self):
+ return 1
+ else:
+ return 0
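+
+
+# Illustrative sketch, not part of the cookbook recipe: basic use of this
+# pure-Python Set.  The __getitem__() method added above is what lets
+# Python 1.5 iterate over the elements.  The helper below is hypothetical
+# and never called.
+def _example_set15():
+    a = Set([1, 2, 3])
+    b = Set([3, 4])
+    union = a | b        # contains 1, 2, 3, 4
+    diff = a - b         # contains 1, 2
+    both = a & b         # contains 3
+    return (union, diff, both)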
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_shlex.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_shlex.py
new file mode 100644
index 000000000..d6c10357a
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_shlex.py
@@ -0,0 +1,319 @@
+# -*- coding: iso-8859-1 -*-
+"""A lexical analyzer class for simple shell-like syntaxes."""
+
+# Module and documentation by Eric S. Raymond, 21 Dec 1998
+# Input stacking and error message cleanup added by ESR, March 2000
+# push_source() and pop_source() made explicit by ESR, January 2001.
+# Posix compliance, split(), string arguments, and
+# iterator interface by Gustavo Niemeyer, April 2003.
+
+import os.path
+import sys
+#from collections import deque
+
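+# Minimal stand-in for collections.deque (the commented-out import above),
+# which only became available in Python 2.4.  shlex only needs appendleft(),
+# popleft() and truth testing via len().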
+class deque:
+ def __init__(self):
+ self.data = []
+ def __len__(self):
+ return len(self.data)
+ def appendleft(self, item):
+ self.data.insert(0, item)
+ def popleft(self):
+ return self.data.pop(0)
+
+try:
+ basestring
+except NameError:
+ import types
+ def is_basestring(s):
+ return type(s) is types.StringType
+else:
+ def is_basestring(s):
+ return isinstance(s, basestring)
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+__all__ = ["shlex", "split"]
+
+class shlex:
+ "A lexical analyzer class for simple shell-like syntaxes."
+ def __init__(self, instream=None, infile=None, posix=False):
+ if is_basestring(instream):
+ instream = StringIO(instream)
+ if instream is not None:
+ self.instream = instream
+ self.infile = infile
+ else:
+ self.instream = sys.stdin
+ self.infile = None
+ self.posix = posix
+ if posix:
+ self.eof = None
+ else:
+ self.eof = ''
+ self.commenters = '#'
+ self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
+ 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')
+ if self.posix:
+ self.wordchars = self.wordchars + ('ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ'
+ 'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ')
+ self.whitespace = ' \t\r\n'
+ self.whitespace_split = False
+ self.quotes = '\'"'
+ self.escape = '\\'
+ self.escapedquotes = '"'
+ self.state = ' '
+ self.pushback = deque()
+ self.lineno = 1
+ self.debug = 0
+ self.token = ''
+ self.filestack = deque()
+ self.source = None
+ if self.debug:
+ print 'shlex: reading from %s, line %d' \
+ % (self.instream, self.lineno)
+
+ def push_token(self, tok):
+ "Push a token onto the stack popped by the get_token method"
+ if self.debug >= 1:
+ print "shlex: pushing token " + repr(tok)
+ self.pushback.appendleft(tok)
+
+ def push_source(self, newstream, newfile=None):
+ "Push an input source onto the lexer's input source stack."
+ if is_basestring(newstream):
+ newstream = StringIO(newstream)
+ self.filestack.appendleft((self.infile, self.instream, self.lineno))
+ self.infile = newfile
+ self.instream = newstream
+ self.lineno = 1
+ if self.debug:
+ if newfile is not None:
+ print 'shlex: pushing to file %s' % (self.infile,)
+ else:
+ print 'shlex: pushing to stream %s' % (self.instream,)
+
+ def pop_source(self):
+ "Pop the input source stack."
+ self.instream.close()
+ (self.infile, self.instream, self.lineno) = self.filestack.popleft()
+ if self.debug:
+ print 'shlex: popping to %s, line %d' \
+ % (self.instream, self.lineno)
+ self.state = ' '
+
+ def get_token(self):
+ "Get a token from the input stream (or from stack if it's nonempty)"
+ if self.pushback:
+ tok = self.pushback.popleft()
+ if self.debug >= 1:
+ print "shlex: popping token " + repr(tok)
+ return tok
+ # No pushback. Get a token.
+ raw = self.read_token()
+ # Handle inclusions
+ if self.source is not None:
+ while raw == self.source:
+ spec = self.sourcehook(self.read_token())
+ if spec:
+ (newfile, newstream) = spec
+ self.push_source(newstream, newfile)
+ raw = self.get_token()
+ # Maybe we got EOF instead?
+ while raw == self.eof:
+ if not self.filestack:
+ return self.eof
+ else:
+ self.pop_source()
+ raw = self.get_token()
+ # Neither inclusion nor EOF
+ if self.debug >= 1:
+ if raw != self.eof:
+ print "shlex: token=" + repr(raw)
+ else:
+ print "shlex: token=EOF"
+ return raw
+
+ def read_token(self):
+ quoted = False
+ escapedstate = ' '
+ while True:
+ nextchar = self.instream.read(1)
+ if nextchar == '\n':
+ self.lineno = self.lineno + 1
+ if self.debug >= 3:
+ print "shlex: in state", repr(self.state), \
+ "I see character:", repr(nextchar)
+ if self.state is None:
+ self.token = '' # past end of file
+ break
+ elif self.state == ' ':
+ if not nextchar:
+ self.state = None # end of file
+ break
+ elif nextchar in self.whitespace:
+ if self.debug >= 2:
+ print "shlex: I see whitespace in whitespace state"
+ if self.token or (self.posix and quoted):
+ break # emit current token
+ else:
+ continue
+ elif nextchar in self.commenters:
+ self.instream.readline()
+ self.lineno = self.lineno + 1
+ elif self.posix and nextchar in self.escape:
+ escapedstate = 'a'
+ self.state = nextchar
+ elif nextchar in self.wordchars:
+ self.token = nextchar
+ self.state = 'a'
+ elif nextchar in self.quotes:
+ if not self.posix:
+ self.token = nextchar
+ self.state = nextchar
+ elif self.whitespace_split:
+ self.token = nextchar
+ self.state = 'a'
+ else:
+ self.token = nextchar
+ if self.token or (self.posix and quoted):
+ break # emit current token
+ else:
+ continue
+ elif self.state in self.quotes:
+ quoted = True
+ if not nextchar: # end of file
+ if self.debug >= 2:
+ print "shlex: I see EOF in quotes state"
+ # XXX what error should be raised here?
+ raise ValueError, "No closing quotation"
+ if nextchar == self.state:
+ if not self.posix:
+ self.token = self.token + nextchar
+ self.state = ' '
+ break
+ else:
+ self.state = 'a'
+ elif self.posix and nextchar in self.escape and \
+ self.state in self.escapedquotes:
+ escapedstate = self.state
+ self.state = nextchar
+ else:
+ self.token = self.token + nextchar
+ elif self.state in self.escape:
+ if not nextchar: # end of file
+ if self.debug >= 2:
+ print "shlex: I see EOF in escape state"
+ # XXX what error should be raised here?
+ raise ValueError, "No escaped character"
+ # In posix shells, only the quote itself or the escape
+ # character may be escaped within quotes.
+ if escapedstate in self.quotes and \
+ nextchar != self.state and nextchar != escapedstate:
+ self.token = self.token + self.state
+ self.token = self.token + nextchar
+ self.state = escapedstate
+ elif self.state == 'a':
+ if not nextchar:
+ self.state = None # end of file
+ break
+ elif nextchar in self.whitespace:
+ if self.debug >= 2:
+ print "shlex: I see whitespace in word state"
+ self.state = ' '
+ if self.token or (self.posix and quoted):
+ break # emit current token
+ else:
+ continue
+ elif nextchar in self.commenters:
+ self.instream.readline()
+ self.lineno = self.lineno + 1
+ if self.posix:
+ self.state = ' '
+ if self.token or (self.posix and quoted):
+ break # emit current token
+ else:
+ continue
+ elif self.posix and nextchar in self.quotes:
+ self.state = nextchar
+ elif self.posix and nextchar in self.escape:
+ escapedstate = 'a'
+ self.state = nextchar
+ elif nextchar in self.wordchars or nextchar in self.quotes \
+ or self.whitespace_split:
+ self.token = self.token + nextchar
+ else:
+ self.pushback.appendleft(nextchar)
+ if self.debug >= 2:
+ print "shlex: I see punctuation in word state"
+ self.state = ' '
+ if self.token:
+ break # emit current token
+ else:
+ continue
+ result = self.token
+ self.token = ''
+ if self.posix and not quoted and result == '':
+ result = None
+ if self.debug > 1:
+ if result:
+ print "shlex: raw token=" + repr(result)
+ else:
+ print "shlex: raw token=EOF"
+ return result
+
+ def sourcehook(self, newfile):
+ "Hook called on a filename to be sourced."
+ if newfile[0] == '"':
+ newfile = newfile[1:-1]
+ # This implements cpp-like semantics for relative-path inclusion.
+ if is_basestring(self.infile) and not os.path.isabs(newfile):
+ newfile = os.path.join(os.path.dirname(self.infile), newfile)
+ return (newfile, open(newfile, "r"))
+
+ def error_leader(self, infile=None, lineno=None):
+ "Emit a C-compiler-like, Emacs-friendly error-message leader."
+ if infile is None:
+ infile = self.infile
+ if lineno is None:
+ lineno = self.lineno
+ return "\"%s\", line %d: " % (infile, lineno)
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ token = self.get_token()
+ if token == self.eof:
+ raise StopIteration
+ return token
+
+def split(s, comments=False):
+ lex = shlex(s, posix=True)
+ lex.whitespace_split = True
+ if not comments:
+ lex.commenters = ''
+ #return list(lex)
+ result = []
+ while True:
+ token = lex.get_token()
+ if token == lex.eof:
+ break
+ result.append(token)
+ return result
+
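split() just runs the lexer in POSIX mode with whitespace_split enabled and collects the tokens; for illustration, the standard-library shlex.split (which this function mirrors) behaves like this:

    import shlex

    print shlex.split('gcc -o "out dir/app" main.c')
    # ['gcc', '-o', 'out dir/app', 'main.c']
    print shlex.split('echo hi # kept, because comments=False empties commenters')
    # ['echo', 'hi', '#', 'kept,', 'because', 'comments=False', 'empties', 'commenters']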
+if __name__ == '__main__':
+ if len(sys.argv) == 1:
+ lexer = shlex()
+ else:
+ file = sys.argv[1]
+ lexer = shlex(open(file), file)
+ while 1:
+ tt = lexer.get_token()
+ if tt:
+ print "Token: " + repr(tt)
+ else:
+ break
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_subprocess.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_subprocess.py
new file mode 100644
index 000000000..68d0e4c85
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_subprocess.py
@@ -0,0 +1,1290 @@
+# subprocess - Subprocesses with accessible I/O streams
+#
+# For more information about this module, see PEP 324.
+#
+# This module should remain compatible with Python 2.2, see PEP 291.
+#
+# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
+#
+# Licensed to PSF under a Contributor Agreement.
+# See http://www.python.org/2.4/license for licensing details.
+
+r"""subprocess - Subprocesses with accessible I/O streams
+
+This module allows you to spawn processes, connect to their
+input/output/error pipes, and obtain their return codes. This module
+intends to replace several other, older modules and functions, like:
+
+os.system
+os.spawn*
+os.popen*
+popen2.*
+commands.*
+
+Information about how the subprocess module can be used to replace these
+modules and functions can be found below.
+
+
+
+Using the subprocess module
+===========================
+This module defines one class called Popen:
+
+class Popen(args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0):
+
+
+Arguments are:
+
+args should be a string, or a sequence of program arguments. The
+program to execute is normally the first item in the args sequence or
+string, but can be explicitly set by using the executable argument.
+
+On UNIX, with shell=False (default): In this case, the Popen class
+uses os.execvp() to execute the child program. args should normally
+be a sequence. A string will be treated as a sequence with the string
+as the only item (the program to execute).
+
+On UNIX, with shell=True: If args is a string, it specifies the
+command string to execute through the shell. If args is a sequence,
+the first item specifies the command string, and any additional items
+will be treated as additional shell arguments.
+
+On Windows: the Popen class uses CreateProcess() to execute the child
+program, which operates on strings. If args is a sequence, it will be
+converted to a string using the list2cmdline method. Please note that
+not all MS Windows applications interpret the command line the same
+way: The list2cmdline is designed for applications using the same
+rules as the MS C runtime.
+
+bufsize, if given, has the same meaning as the corresponding argument
+to the built-in open() function: 0 means unbuffered, 1 means line
+buffered, any other positive value means use a buffer of
+(approximately) that size. A negative bufsize means to use the system
+default, which usually means fully buffered. The default value for
+bufsize is 0 (unbuffered).
+
+stdin, stdout and stderr specify the executed program's standard
+input, standard output and standard error file handles, respectively.
+Valid values are PIPE, an existing file descriptor (a positive
+integer), an existing file object, and None. PIPE indicates that a
+new pipe to the child should be created. With None, no redirection
+will occur; the child's file handles will be inherited from the
+parent. Additionally, stderr can be STDOUT, which indicates that the
+stderr data from the applications should be captured into the same
+file handle as for stdout.
+
+If preexec_fn is set to a callable object, this object will be called
+in the child process just before the child is executed.
+
+If close_fds is true, all file descriptors except 0, 1 and 2 will be
+closed before the child process is executed.
+
+if shell is true, the specified command will be executed through the
+shell.
+
+If cwd is not None, the current directory will be changed to cwd
+before the child is executed.
+
+If env is not None, it defines the environment variables for the new
+process.
+
+If universal_newlines is true, the file objects stdout and stderr are
+opened as text files, but lines may be terminated by any of '\n',
+the Unix end-of-line convention, '\r', the Macintosh convention or
+'\r\n', the Windows convention. All of these external representations
+are seen as '\n' by the Python program. Note: This feature is only
+available if Python is built with universal newline support (the
+default). Also, the newlines attribute of the file objects stdout,
+stdin and stderr are not updated by the communicate() method.
+
+The startupinfo and creationflags, if given, will be passed to the
+underlying CreateProcess() function. They can specify things such as
+appearance of the main window and priority for the new process.
+(Windows only)
+
+
+This module also defines two shortcut functions:
+
+call(*popenargs, **kwargs):
+ Run command with arguments. Wait for command to complete, then
+ return the returncode attribute.
+
+ The arguments are the same as for the Popen constructor. Example:
+
+ retcode = call(["ls", "-l"])
+
+check_call(*popenargs, **kwargs):
+ Run command with arguments. Wait for command to complete. If the
+ exit code was zero then return, otherwise raise
+ CalledProcessError. The CalledProcessError object will have the
+ return code in the returncode attribute.
+
+ The arguments are the same as for the Popen constructor. Example:
+
+ check_call(["ls", "-l"])
+
+Exceptions
+----------
+Exceptions raised in the child process, before the new program has
+started to execute, will be re-raised in the parent. Additionally,
+the exception object will have one extra attribute called
+'child_traceback', which is a string containing traceback information
+from the child's point of view.
+
+The most common exception raised is OSError. This occurs, for
+example, when trying to execute a non-existent file. Applications
+should prepare for OSErrors.
+
+A ValueError will be raised if Popen is called with invalid arguments.
+
+check_call() will raise CalledProcessError, if the called process
+returns a non-zero return code.
+
+
+Security
+--------
+Unlike some other popen functions, this implementation will never call
+/bin/sh implicitly. This means that all characters, including shell
+metacharacters, can safely be passed to child processes.
+
+
+Popen objects
+=============
+Instances of the Popen class have the following methods:
+
+poll()
+ Check if child process has terminated. Returns returncode
+ attribute.
+
+wait()
+ Wait for child process to terminate. Returns returncode attribute.
+
+communicate(input=None)
+ Interact with process: Send data to stdin. Read data from stdout
+ and stderr, until end-of-file is reached. Wait for process to
+ terminate. The optional input argument should be a string to be
+ sent to the child process, or None, if no data should be sent to
+ the child.
+
+ communicate() returns a tuple (stdout, stderr).
+
+ Note: The data read is buffered in memory, so do not use this
+ method if the data size is large or unlimited.
+
+The following attributes are also available:
+
+stdin
+ If the stdin argument is PIPE, this attribute is a file object
+ that provides input to the child process. Otherwise, it is None.
+
+stdout
+ If the stdout argument is PIPE, this attribute is a file object
+ that provides output from the child process. Otherwise, it is
+ None.
+
+stderr
+ If the stderr argument is PIPE, this attribute is file object that
+ provides error output from the child process. Otherwise, it is
+ None.
+
+pid
+ The process ID of the child process.
+
+returncode
+ The child return code. A None value indicates that the process
+ hasn't terminated yet. A negative value -N indicates that the
+ child was terminated by signal N (UNIX only).
+
+
+Replacing older functions with the subprocess module
+====================================================
+In this section, "a ==> b" means that b can be used as a replacement
+for a.
+
+Note: All functions in this section fail (more or less) silently if
+the executed program cannot be found; this module raises an OSError
+exception.
+
+In the following examples, we assume that the subprocess module is
+imported with "from subprocess import *".
+
+
+Replacing /bin/sh shell backquote
+---------------------------------
+output=`mycmd myarg`
+==>
+output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
+
+
+Replacing shell pipe line
+-------------------------
+output=`dmesg | grep hda`
+==>
+p1 = Popen(["dmesg"], stdout=PIPE)
+p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
+output = p2.communicate()[0]
+
+
+Replacing os.system()
+---------------------
+sts = os.system("mycmd" + " myarg")
+==>
+p = Popen("mycmd" + " myarg", shell=True)
+pid, sts = os.waitpid(p.pid, 0)
+
+Note:
+
+* Calling the program through the shell is usually not required.
+
+* It's easier to look at the returncode attribute than the
+ exitstatus.
+
+A more real-world example would look like this:
+
+try:
+ retcode = call("mycmd" + " myarg", shell=True)
+ if retcode < 0:
+ print >>sys.stderr, "Child was terminated by signal", -retcode
+ else:
+ print >>sys.stderr, "Child returned", retcode
+except OSError, e:
+ print >>sys.stderr, "Execution failed:", e
+
+
+Replacing os.spawn*
+-------------------
+P_NOWAIT example:
+
+pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
+==>
+pid = Popen(["/bin/mycmd", "myarg"]).pid
+
+
+P_WAIT example:
+
+retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
+==>
+retcode = call(["/bin/mycmd", "myarg"])
+
+
+Vector example:
+
+os.spawnvp(os.P_NOWAIT, path, args)
+==>
+Popen([path] + args[1:])
+
+
+Environment example:
+
+os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
+==>
+Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
+
+
+Replacing os.popen*
+-------------------
+pipe = os.popen(cmd, mode='r', bufsize)
+==>
+pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout
+
+pipe = os.popen(cmd, mode='w', bufsize)
+==>
+pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin
+
+
+(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize)
+==>
+p = Popen(cmd, shell=True, bufsize=bufsize,
+ stdin=PIPE, stdout=PIPE, close_fds=True)
+(child_stdin, child_stdout) = (p.stdin, p.stdout)
+
+
+(child_stdin,
+ child_stdout,
+ child_stderr) = os.popen3(cmd, mode, bufsize)
+==>
+p = Popen(cmd, shell=True, bufsize=bufsize,
+ stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
+(child_stdin,
+ child_stdout,
+ child_stderr) = (p.stdin, p.stdout, p.stderr)
+
+
+(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize)
+==>
+p = Popen(cmd, shell=True, bufsize=bufsize,
+ stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
+(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout)
+
+
+Replacing popen2.*
+------------------
+Note: If the cmd argument to popen2 functions is a string, the command
+is executed through /bin/sh. If it is a list, the command is directly
+executed.
+
+(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode)
+==>
+p = Popen(["somestring"], shell=True, bufsize=bufsize
+ stdin=PIPE, stdout=PIPE, close_fds=True)
+(child_stdout, child_stdin) = (p.stdout, p.stdin)
+
+
+(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode)
+==>
+p = Popen(["mycmd", "myarg"], bufsize=bufsize,
+ stdin=PIPE, stdout=PIPE, close_fds=True)
+(child_stdout, child_stdin) = (p.stdout, p.stdin)
+
+The popen2.Popen3 and popen2.Popen4 classes basically work as subprocess.Popen,
+except that:
+
+* subprocess.Popen raises an exception if the execution fails
+* the capturestderr argument is replaced with the stderr argument.
+* stdin=PIPE and stdout=PIPE must be specified.
+* popen2 closes all filedescriptors by default, but you have to specify
+ close_fds=True with subprocess.Popen.
+
+
+"""
+
+import sys
+mswindows = (sys.platform == "win32")
+
+import os
+import string
+import types
+import traceback
+
+# Exception classes used by this module.
+class CalledProcessError(Exception):
+ """This exception is raised when a process run by check_call() returns
+ a non-zero exit status. The exit status will be stored in the
+ returncode attribute."""
+ def __init__(self, returncode, cmd):
+ self.returncode = returncode
+ self.cmd = cmd
+ def __str__(self):
+ return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
+
+
+if mswindows:
+ try:
+ import threading
+ except ImportError:
+ # SCons: the threading module is only used by the communicate()
+ # method, which we don't actually use, so don't worry if we
+ # can't import it.
+ pass
+ import msvcrt
+ if 0: # <-- change this to use pywin32 instead of the _subprocess driver
+ import pywintypes
+ from win32api import GetStdHandle, STD_INPUT_HANDLE, \
+ STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
+ from win32api import GetCurrentProcess, DuplicateHandle, \
+ GetModuleFileName, GetVersion
+ from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
+ from win32pipe import CreatePipe
+ from win32process import CreateProcess, STARTUPINFO, \
+ GetExitCodeProcess, STARTF_USESTDHANDLES, \
+ STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
+ from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
+ else:
+ # SCons: don't die on Python versions that don't have _subprocess.
+ try:
+ from _subprocess import *
+ except ImportError:
+ pass
+ class STARTUPINFO:
+ dwFlags = 0
+ hStdInput = None
+ hStdOutput = None
+ hStdError = None
+ wShowWindow = 0
+ class pywintypes:
+ error = IOError
+else:
+ import select
+ import errno
+ import fcntl
+ import pickle
+
+ try:
+ fcntl.F_GETFD
+ except AttributeError:
+ fcntl.F_GETFD = 1
+
+ try:
+ fcntl.F_SETFD
+ except AttributeError:
+ fcntl.F_SETFD = 2
+
+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
+
+try:
+ MAXFD = os.sysconf("SC_OPEN_MAX")
+except KeyboardInterrupt:
+ raise # SCons: don't swallow keyboard interrupts
+except:
+ MAXFD = 256
+
+# True/False does not exist on 2.2.0
+try:
+ False
+except NameError:
+ False = 0
+ True = 1
+
+try:
+ isinstance(1, int)
+except TypeError:
+ def is_int(obj):
+ return type(obj) == type(1)
+ def is_int_or_long(obj):
+ return type(obj) in (type(1), type(1L))
+else:
+ def is_int(obj):
+ return isinstance(obj, int)
+ def is_int_or_long(obj):
+ return isinstance(obj, (int, long))
+
+try:
+ types.StringTypes
+except AttributeError:
+ try:
+ types.StringTypes = (types.StringType, types.UnicodeType)
+ except AttributeError:
+ types.StringTypes = (types.StringType,)
+ def is_string(obj):
+ return type(obj) in types.StringTypes
+else:
+ def is_string(obj):
+ return isinstance(obj, types.StringTypes)
+
+_active = []
+
+def _cleanup():
+ for inst in _active[:]:
+ if inst.poll(_deadstate=sys.maxint) >= 0:
+ try:
+ _active.remove(inst)
+ except ValueError:
+ # This can happen if two threads create a new Popen instance.
+ # It's harmless that it was already removed, so ignore.
+ pass
+
+PIPE = -1
+STDOUT = -2
+
+
+def call(*popenargs, **kwargs):
+ """Run command with arguments. Wait for command to complete, then
+ return the returncode attribute.
+
+ The arguments are the same as for the Popen constructor. Example:
+
+ retcode = call(["ls", "-l"])
+ """
+ return apply(Popen, popenargs, kwargs).wait()
+
+
+def check_call(*popenargs, **kwargs):
+ """Run command with arguments. Wait for command to complete. If
+ the exit code was zero then return, otherwise raise
+ CalledProcessError. The CalledProcessError object will have the
+ return code in the returncode attribute.
+
+ The arguments are the same as for the Popen constructor. Example:
+
+ check_call(["ls", "-l"])
+ """
+ retcode = apply(call, popenargs, kwargs)
+ cmd = kwargs.get("args")
+ if cmd is None:
+ cmd = popenargs[0]
+ if retcode:
+ raise CalledProcessError(retcode, cmd)
+ return retcode
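A hedged usage sketch for check_call(): a non-zero exit surfaces as CalledProcessError carrying both the command and the status, so callers usually wrap it in try/except.

    from subprocess import check_call, CalledProcessError

    try:
        check_call(["false"])          # exits with status 1
    except CalledProcessError, e:
        print "command %r failed with status %d" % (e.cmd, e.returncode)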
+
+
+def list2cmdline(seq):
+ """
+ Translate a sequence of arguments into a command line
+ string, using the same rules as the MS C runtime:
+
+ 1) Arguments are delimited by white space, which is either a
+ space or a tab.
+
+ 2) A string surrounded by double quotation marks is
+ interpreted as a single argument, regardless of white space
+ contained within. A quoted string can be embedded in an
+ argument.
+
+ 3) A double quotation mark preceded by a backslash is
+ interpreted as a literal double quotation mark.
+
+ 4) Backslashes are interpreted literally, unless they
+ immediately precede a double quotation mark.
+
+ 5) If backslashes immediately precede a double quotation mark,
+ every pair of backslashes is interpreted as a literal
+ backslash. If the number of backslashes is odd, the last
+ backslash escapes the next double quotation mark as
+ described in rule 3.
+ """
+
+ # See
+ # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
+ result = []
+ needquote = False
+ for arg in seq:
+ bs_buf = []
+
+ # Add a space to separate this argument from the others
+ if result:
+ result.append(' ')
+
+ needquote = (" " in arg) or ("\t" in arg)
+ if needquote:
+ result.append('"')
+
+ for c in arg:
+ if c == '\\':
+ # Don't know if we need to double yet.
+ bs_buf.append(c)
+ elif c == '"':
+ # Double backslashes.
+ result.append('\\' * len(bs_buf)*2)
+ bs_buf = []
+ result.append('\\"')
+ else:
+ # Normal char
+ if bs_buf:
+ result.extend(bs_buf)
+ bs_buf = []
+ result.append(c)
+
+ # Add remaining backslashes, if any.
+ if bs_buf:
+ result.extend(bs_buf)
+
+ if needquote:
+ result.extend(bs_buf)
+ result.append('"')
+
+ return string.join(result, '')
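The quoting rules above are easiest to see by example: only arguments containing whitespace get surrounding quotes, and embedded double quotes are backslash-escaped.

    from subprocess import list2cmdline

    print list2cmdline(["copy", "my file.txt", "backup"])
    # copy "my file.txt" backup
    print list2cmdline(["echo", 'say "hi"'])
    # echo "say \"hi\""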
+
+
+try:
+ object
+except NameError:
+ class object:
+ pass
+
+class Popen(object):
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=False, shell=False,
+ cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0):
+ """Create new Popen instance."""
+ _cleanup()
+
+ self._child_created = False
+ if not is_int_or_long(bufsize):
+ raise TypeError("bufsize must be an integer")
+
+ if mswindows:
+ if preexec_fn is not None:
+ raise ValueError("preexec_fn is not supported on Windows "
+ "platforms")
+ if close_fds:
+ raise ValueError("close_fds is not supported on Windows "
+ "platforms")
+ else:
+ # POSIX
+ if startupinfo is not None:
+ raise ValueError("startupinfo is only supported on Windows "
+ "platforms")
+ if creationflags != 0:
+ raise ValueError("creationflags is only supported on Windows "
+ "platforms")
+
+ self.stdin = None
+ self.stdout = None
+ self.stderr = None
+ self.pid = None
+ self.returncode = None
+ self.universal_newlines = universal_newlines
+
+ # Input and output objects. The general principle is like
+ # this:
+ #
+ # Parent Child
+ # ------ -----
+ # p2cwrite ---stdin---> p2cread
+ # c2pread <--stdout--- c2pwrite
+ # errread <--stderr--- errwrite
+ #
+ # On POSIX, the child objects are file descriptors. On
+ # Windows, these are Windows file handles. The parent objects
+ # are file descriptors on both platforms. The parent objects
+ # are None when not using PIPEs. The child objects are None
+ # when not redirecting.
+
+ (p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite) = self._get_handles(stdin, stdout, stderr)
+
+ self._execute_child(args, executable, preexec_fn, close_fds,
+ cwd, env, universal_newlines,
+ startupinfo, creationflags, shell,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
+
+ if p2cwrite:
+ self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
+ if c2pread:
+ if universal_newlines:
+ self.stdout = os.fdopen(c2pread, 'rU', bufsize)
+ else:
+ self.stdout = os.fdopen(c2pread, 'rb', bufsize)
+ if errread:
+ if universal_newlines:
+ self.stderr = os.fdopen(errread, 'rU', bufsize)
+ else:
+ self.stderr = os.fdopen(errread, 'rb', bufsize)
+
+
+ def _translate_newlines(self, data):
+ data = data.replace("\r\n", "\n")
+ data = data.replace("\r", "\n")
+ return data
+
+
+ def __del__(self):
+ if not self._child_created:
+ # We didn't get to successfully create a child process.
+ return
+ # In case the child hasn't been waited on, check if it's done.
+ self.poll(_deadstate=sys.maxint)
+ if self.returncode is None and _active is not None:
+ # Child is still running, keep us alive until we can wait on it.
+ _active.append(self)
+
+
+ def communicate(self, input=None):
+ """Interact with process: Send data to stdin. Read data from
+ stdout and stderr, until end-of-file is reached. Wait for
+ process to terminate. The optional input argument should be a
+ string to be sent to the child process, or None, if no data
+ should be sent to the child.
+
+ communicate() returns a tuple (stdout, stderr)."""
+
+ # Optimization: If we are only using one pipe, or no pipe at
+ # all, using select() or threads is unnecessary.
+ if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
+ stdout = None
+ stderr = None
+ if self.stdin:
+ if input:
+ self.stdin.write(input)
+ self.stdin.close()
+ elif self.stdout:
+ stdout = self.stdout.read()
+ elif self.stderr:
+ stderr = self.stderr.read()
+ self.wait()
+ return (stdout, stderr)
+
+ return self._communicate(input)
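communicate() with the input argument is the safe way to feed a child's stdin while draining its output, avoiding the deadlocks that hand-rolled pipe reads and writes can hit. A small POSIX sketch using tr:

    from subprocess import Popen, PIPE

    p = Popen(["tr", "a-z", "A-Z"], stdin=PIPE, stdout=PIPE)
    out, err = p.communicate("hello subprocess\n")
    print repr(out)    # 'HELLO SUBPROCESS\n'; err is None because stderr was not piped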
+
+
+ if mswindows:
+ #
+ # Windows methods
+ #
+ def _get_handles(self, stdin, stdout, stderr):
+ """Construct and return tupel with IO objects:
+ p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+ """
+ if stdin is None and stdout is None and stderr is None:
+ return (None, None, None, None, None, None)
+
+ p2cread, p2cwrite = None, None
+ c2pread, c2pwrite = None, None
+ errread, errwrite = None, None
+
+ if stdin is None:
+ p2cread = GetStdHandle(STD_INPUT_HANDLE)
+ elif stdin == PIPE:
+ p2cread, p2cwrite = CreatePipe(None, 0)
+ # Detach and turn into fd
+ p2cwrite = p2cwrite.Detach()
+ p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
+ elif is_int(stdin):
+ p2cread = msvcrt.get_osfhandle(stdin)
+ else:
+ # Assuming file-like object
+ p2cread = msvcrt.get_osfhandle(stdin.fileno())
+ p2cread = self._make_inheritable(p2cread)
+
+ if stdout is None:
+ c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
+ elif stdout == PIPE:
+ c2pread, c2pwrite = CreatePipe(None, 0)
+ # Detach and turn into fd
+ c2pread = c2pread.Detach()
+ c2pread = msvcrt.open_osfhandle(c2pread, 0)
+ elif is_int(stdout):
+ c2pwrite = msvcrt.get_osfhandle(stdout)
+ else:
+ # Assuming file-like object
+ c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
+ c2pwrite = self._make_inheritable(c2pwrite)
+
+ if stderr is None:
+ errwrite = GetStdHandle(STD_ERROR_HANDLE)
+ elif stderr == PIPE:
+ errread, errwrite = CreatePipe(None, 0)
+ # Detach and turn into fd
+ errread = errread.Detach()
+ errread = msvcrt.open_osfhandle(errread, 0)
+ elif stderr == STDOUT:
+ errwrite = c2pwrite
+ elif is_int(stderr):
+ errwrite = msvcrt.get_osfhandle(stderr)
+ else:
+ # Assuming file-like object
+ errwrite = msvcrt.get_osfhandle(stderr.fileno())
+ errwrite = self._make_inheritable(errwrite)
+
+ return (p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
+
+
+ def _make_inheritable(self, handle):
+ """Return a duplicate of handle, which is inheritable"""
+ return DuplicateHandle(GetCurrentProcess(), handle,
+ GetCurrentProcess(), 0, 1,
+ DUPLICATE_SAME_ACCESS)
+
+
+ def _find_w9xpopen(self):
+ """Find and return absolut path to w9xpopen.exe"""
+ w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)),
+ "w9xpopen.exe")
+ if not os.path.exists(w9xpopen):
+ # Eeek - file-not-found - possibly an embedding
+ # situation - see if we can locate it in sys.exec_prefix
+ w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
+ "w9xpopen.exe")
+ if not os.path.exists(w9xpopen):
+ raise RuntimeError("Cannot locate w9xpopen.exe, which is "
+ "needed for Popen to work with your "
+ "shell or platform.")
+ return w9xpopen
+
+
+ def _execute_child(self, args, executable, preexec_fn, close_fds,
+ cwd, env, universal_newlines,
+ startupinfo, creationflags, shell,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite):
+ """Execute program (MS Windows version)"""
+
+ if not isinstance(args, types.StringTypes):
+ args = list2cmdline(args)
+
+ # Process startup details
+ if startupinfo is None:
+ startupinfo = STARTUPINFO()
+ if None not in (p2cread, c2pwrite, errwrite):
+ startupinfo.dwFlags = startupinfo.dwFlags | STARTF_USESTDHANDLES
+ startupinfo.hStdInput = p2cread
+ startupinfo.hStdOutput = c2pwrite
+ startupinfo.hStdError = errwrite
+
+ if shell:
+ startupinfo.dwFlags = startupinfo.dwFlags | STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = SW_HIDE
+ comspec = os.environ.get("COMSPEC", "cmd.exe")
+ args = comspec + " /c " + args
+ if (GetVersion() >= 0x80000000L or
+ os.path.basename(comspec).lower() == "command.com"):
+ # Win9x, or using command.com on NT. We need to
+ # use the w9xpopen intermediate program. For more
+ # information, see KB Q150956
+ # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
+ w9xpopen = self._find_w9xpopen()
+ args = '"%s" %s' % (w9xpopen, args)
+ # Not passing CREATE_NEW_CONSOLE has been known to
+ # cause random failures on win9x. Specifically a
+ # dialog: "Your program accessed mem currently in
+ # use at xxx" and a hopeful warning about the
+ # stability of your system. Cost is Ctrl+C won't
+ # kill children.
+ creationflags = creationflags | CREATE_NEW_CONSOLE
+
+ # Start the process
+ try:
+ hp, ht, pid, tid = CreateProcess(executable, args,
+ # no special security
+ None, None,
+ # must inherit handles to pass std
+ # handles
+ 1,
+ creationflags,
+ env,
+ cwd,
+ startupinfo)
+ except pywintypes.error, e:
+ # Translate pywintypes.error to WindowsError, which is
+ # a subclass of OSError. FIXME: We should really
+ # translate errno using _sys_errlist (or similar), but
+ # how can this be done from Python?
+ raise apply(WindowsError, e.args)
+
+ # Retain the process handle, but close the thread handle
+ self._child_created = True
+ self._handle = hp
+ self.pid = pid
+ ht.Close()
+
+ # Child is launched. Close the parent's copy of those pipe
+ # handles that only the child should have open. You need
+ # to make sure that no handles to the write end of the
+ # output pipe are maintained in this process or else the
+ # pipe will not close when the child process exits and the
+ # ReadFile will hang.
+ if p2cread is not None:
+ p2cread.Close()
+ if c2pwrite is not None:
+ c2pwrite.Close()
+ if errwrite is not None:
+ errwrite.Close()
+
+
+ def poll(self, _deadstate=None):
+ """Check if child process has terminated. Returns returncode
+ attribute."""
+ if self.returncode is None:
+ if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
+ self.returncode = GetExitCodeProcess(self._handle)
+ return self.returncode
+
+
+ def wait(self):
+ """Wait for child process to terminate. Returns returncode
+ attribute."""
+ if self.returncode is None:
+ obj = WaitForSingleObject(self._handle, INFINITE)
+ self.returncode = GetExitCodeProcess(self._handle)
+ return self.returncode
+
+
+ def _readerthread(self, fh, buffer):
+ buffer.append(fh.read())
+
+
+ def _communicate(self, input):
+ stdout = None # Return
+ stderr = None # Return
+
+ if self.stdout:
+ stdout = []
+ stdout_thread = threading.Thread(target=self._readerthread,
+ args=(self.stdout, stdout))
+ stdout_thread.setDaemon(True)
+ stdout_thread.start()
+ if self.stderr:
+ stderr = []
+ stderr_thread = threading.Thread(target=self._readerthread,
+ args=(self.stderr, stderr))
+ stderr_thread.setDaemon(True)
+ stderr_thread.start()
+
+ if self.stdin:
+ if input is not None:
+ self.stdin.write(input)
+ self.stdin.close()
+
+ if self.stdout:
+ stdout_thread.join()
+ if self.stderr:
+ stderr_thread.join()
+
+ # All data exchanged. Translate lists into strings.
+ if stdout is not None:
+ stdout = stdout[0]
+ if stderr is not None:
+ stderr = stderr[0]
+
+ # Translate newlines, if requested. We cannot let the file
+ # object do the translation: It is based on stdio, which is
+ # impossible to combine with select (unless forcing no
+ # buffering).
+ if self.universal_newlines and hasattr(file, 'newlines'):
+ if stdout:
+ stdout = self._translate_newlines(stdout)
+ if stderr:
+ stderr = self._translate_newlines(stderr)
+
+ self.wait()
+ return (stdout, stderr)
+
+ else:
+ #
+ # POSIX methods
+ #
+ def _get_handles(self, stdin, stdout, stderr):
+ """Construct and return tupel with IO objects:
+ p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+ """
+ p2cread, p2cwrite = None, None
+ c2pread, c2pwrite = None, None
+ errread, errwrite = None, None
+
+ if stdin is None:
+ pass
+ elif stdin == PIPE:
+ p2cread, p2cwrite = os.pipe()
+ elif is_int(stdin):
+ p2cread = stdin
+ else:
+ # Assuming file-like object
+ p2cread = stdin.fileno()
+
+ if stdout is None:
+ pass
+ elif stdout == PIPE:
+ c2pread, c2pwrite = os.pipe()
+ elif is_int(stdout):
+ c2pwrite = stdout
+ else:
+ # Assuming file-like object
+ c2pwrite = stdout.fileno()
+
+ if stderr is None:
+ pass
+ elif stderr == PIPE:
+ errread, errwrite = os.pipe()
+ elif stderr == STDOUT:
+ errwrite = c2pwrite
+ elif is_int(stderr):
+ errwrite = stderr
+ else:
+ # Assuming file-like object
+ errwrite = stderr.fileno()
+
+ return (p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
+
+
+ def _set_cloexec_flag(self, fd):
+ try:
+ cloexec_flag = fcntl.FD_CLOEXEC
+ except AttributeError:
+ cloexec_flag = 1
+
+ old = fcntl.fcntl(fd, fcntl.F_GETFD)
+ fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
+
+
+ def _close_fds(self, but):
+ for i in xrange(3, MAXFD):
+ if i == but:
+ continue
+ try:
+ os.close(i)
+ except KeyboardInterrupt:
+ raise # SCons: don't swallow keyboard interrupts
+ except:
+ pass
+
+
+ def _execute_child(self, args, executable, preexec_fn, close_fds,
+ cwd, env, universal_newlines,
+ startupinfo, creationflags, shell,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite):
+ """Execute program (POSIX version)"""
+
+ if is_string(args):
+ args = [args]
+
+ if shell:
+ args = ["/bin/sh", "-c"] + args
+
+ if executable is None:
+ executable = args[0]
+
+ # For transferring possible exec failure from child to parent
+ # The first char specifies the exception type: 0 means
+ # OSError, 1 means some other error.
+ errpipe_read, errpipe_write = os.pipe()
+ self._set_cloexec_flag(errpipe_write)
+
+ self.pid = os.fork()
+ self._child_created = True
+ if self.pid == 0:
+ # Child
+ try:
+ # Close parent's pipe ends
+ if p2cwrite:
+ os.close(p2cwrite)
+ if c2pread:
+ os.close(c2pread)
+ if errread:
+ os.close(errread)
+ os.close(errpipe_read)
+
+ # Dup fds for child
+ if p2cread:
+ os.dup2(p2cread, 0)
+ if c2pwrite:
+ os.dup2(c2pwrite, 1)
+ if errwrite:
+ os.dup2(errwrite, 2)
+
+ # Close pipe fds. Make sure we don't close the same
+ # fd more than once, or standard fds.
+ try:
+ set
+ except NameError:
+ # Fall-back for earlier Python versions, so epydoc
+ # can use this module directly to execute things.
+ if p2cread:
+ os.close(p2cread)
+ if c2pwrite and c2pwrite not in (p2cread,):
+ os.close(c2pwrite)
+ if errwrite and errwrite not in (p2cread, c2pwrite):
+ os.close(errwrite)
+ else:
+ for fd in set((p2cread, c2pwrite, errwrite))-set((0,1,2)):
+ if fd: os.close(fd)
+
+ # Close all other fds, if asked for
+ if close_fds:
+ self._close_fds(but=errpipe_write)
+
+ if cwd is not None:
+ os.chdir(cwd)
+
+ if preexec_fn:
+ apply(preexec_fn)
+
+ if env is None:
+ os.execvp(executable, args)
+ else:
+ os.execvpe(executable, args, env)
+
+ except KeyboardInterrupt:
+ raise # SCons: don't swallow keyboard interrupts
+
+ except:
+ exc_type, exc_value, tb = sys.exc_info()
+ # Save the traceback and attach it to the exception object
+ exc_lines = traceback.format_exception(exc_type,
+ exc_value,
+ tb)
+ exc_value.child_traceback = string.join(exc_lines, '')
+ os.write(errpipe_write, pickle.dumps(exc_value))
+
+ # This exitcode won't be reported to applications, so it
+ # really doesn't matter what we return.
+ os._exit(255)
+
+ # Parent
+ os.close(errpipe_write)
+ if p2cread and p2cwrite:
+ os.close(p2cread)
+ if c2pwrite and c2pread:
+ os.close(c2pwrite)
+ if errwrite and errread:
+ os.close(errwrite)
+
+ # Wait for exec to fail or succeed; possibly raising exception
+ data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB
+ os.close(errpipe_read)
+ if data != "":
+ os.waitpid(self.pid, 0)
+ child_exception = pickle.loads(data)
+ raise child_exception
+
+
+ def _handle_exitstatus(self, sts):
+ if os.WIFSIGNALED(sts):
+ self.returncode = -os.WTERMSIG(sts)
+ elif os.WIFEXITED(sts):
+ self.returncode = os.WEXITSTATUS(sts)
+ else:
+ # Should never happen
+ raise RuntimeError("Unknown child exit status!")
+
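For reference, the same decoding can be applied by hand to a raw status word from os.waitpid(); a POSIX-only sketch mirroring the branches above:

    import os

    def describe_status(sts):
        # Same mapping as _handle_exitstatus(): negative N means killed by signal N.
        if os.WIFSIGNALED(sts):
            return -os.WTERMSIG(sts)
        elif os.WIFEXITED(sts):
            return os.WEXITSTATUS(sts)
        raise RuntimeError("Unknown child exit status!")

    pid = os.spawnlp(os.P_NOWAIT, "true", "true")
    pid, sts = os.waitpid(pid, 0)
    print describe_status(sts)    # 0 for a normal exit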
+
+ def poll(self, _deadstate=None):
+ """Check if child process has terminated. Returns returncode
+ attribute."""
+ if self.returncode is None:
+ try:
+ pid, sts = os.waitpid(self.pid, os.WNOHANG)
+ if pid == self.pid:
+ self._handle_exitstatus(sts)
+ except os.error:
+ if _deadstate is not None:
+ self.returncode = _deadstate
+ return self.returncode
+
+
+ def wait(self):
+ """Wait for child process to terminate. Returns returncode
+ attribute."""
+ if self.returncode is None:
+ pid, sts = os.waitpid(self.pid, 0)
+ self._handle_exitstatus(sts)
+ return self.returncode
+
+
+ def _communicate(self, input):
+ read_set = []
+ write_set = []
+ stdout = None # Return
+ stderr = None # Return
+
+ if self.stdin:
+ # Flush stdio buffer. This might block, if the user has
+ # been writing to .stdin in an uncontrolled fashion.
+ self.stdin.flush()
+ if input:
+ write_set.append(self.stdin)
+ else:
+ self.stdin.close()
+ if self.stdout:
+ read_set.append(self.stdout)
+ stdout = []
+ if self.stderr:
+ read_set.append(self.stderr)
+ stderr = []
+
+ input_offset = 0
+ while read_set or write_set:
+ rlist, wlist, xlist = select.select(read_set, write_set, [])
+
+ if self.stdin in wlist:
+ # When select has indicated that the file is writable,
+ # we can write up to PIPE_BUF bytes without risk of
+ # blocking. POSIX defines PIPE_BUF >= 512
+ bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
+ input_offset = input_offset + bytes_written
+ if input_offset >= len(input):
+ self.stdin.close()
+ write_set.remove(self.stdin)
+
+ if self.stdout in rlist:
+ data = os.read(self.stdout.fileno(), 1024)
+ if data == "":
+ self.stdout.close()
+ read_set.remove(self.stdout)
+ stdout.append(data)
+
+ if self.stderr in rlist:
+ data = os.read(self.stderr.fileno(), 1024)
+ if data == "":
+ self.stderr.close()
+ read_set.remove(self.stderr)
+ stderr.append(data)
+
+ # All data exchanged. Translate lists into strings.
+ if stdout is not None:
+ stdout = string.join(stdout, '')
+ if stderr is not None:
+ stderr = string.join(stderr, '')
+
+ # Translate newlines, if requested. We cannot let the file
+ # object do the translation: It is based on stdio, which is
+ # impossible to combine with select (unless forcing no
+ # buffering).
+ if self.universal_newlines and hasattr(file, 'newlines'):
+ if stdout:
+ stdout = self._translate_newlines(stdout)
+ if stderr:
+ stderr = self._translate_newlines(stderr)
+
+ self.wait()
+ return (stdout, stderr)
+
+
+def _demo_posix():
+ #
+ # Example 1: Simple redirection: Get process list
+ #
+ plist = Popen(["ps"], stdout=PIPE).communicate()[0]
+ print "Process list:"
+ print plist
+
+ #
+ # Example 2: Change uid before executing child
+ #
+ if os.getuid() == 0:
+ p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
+ p.wait()
+
+ #
+ # Example 3: Connecting several subprocesses
+ #
+ print "Looking for 'hda'..."
+ p1 = Popen(["dmesg"], stdout=PIPE)
+ p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
+ print repr(p2.communicate()[0])
+
+ #
+ # Example 4: Catch execution error
+ #
+ print
+ print "Trying a weird file..."
+ try:
+ print Popen(["/this/path/does/not/exist"]).communicate()
+ except OSError, e:
+ if e.errno == errno.ENOENT:
+ print "The file didn't exist. I thought so..."
+ print "Child traceback:"
+ print e.child_traceback
+ else:
+ print "Error", e.errno
+ else:
+ sys.stderr.write( "Gosh. No error.\n" )
+
+
+def _demo_windows():
+ #
+ # Example 1: Connecting several subprocesses
+ #
+ print "Looking for 'PROMPT' in set output..."
+ p1 = Popen("set", stdout=PIPE, shell=True)
+ p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
+ print repr(p2.communicate()[0])
+
+ #
+ # Example 2: Simple execution of program
+ #
+ print "Executing calc..."
+ p = Popen("calc")
+ p.wait()
+
+
+if __name__ == "__main__":
+ if mswindows:
+ _demo_windows()
+ else:
+ _demo_posix()
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/_scons_textwrap.py b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_textwrap.py
new file mode 100644
index 000000000..72ed9b9c5
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/_scons_textwrap.py
@@ -0,0 +1,376 @@
+"""Text wrapping and filling.
+"""
+
+# Copyright (C) 1999-2001 Gregory P. Ward.
+# Copyright (C) 2002, 2003 Python Software Foundation.
+# Written by Greg Ward <gward@python.net>
+
+__revision__ = "$Id: textwrap.py,v 1.32.8.2 2004/05/13 01:48:15 gward Exp $"
+
+import string, re
+
+try:
+ unicode
+except NameError:
+ class unicode:
+ pass
+
+# Do the right thing with boolean values for all known Python versions
+# (so this module can be copied to projects that don't depend on Python
+# 2.3, e.g. Optik and Docutils).
+try:
+ True, False
+except NameError:
+ (True, False) = (1, 0)
+
+__all__ = ['TextWrapper', 'wrap', 'fill']
+
+# Hardcode the recognized whitespace characters to the US-ASCII
+# whitespace characters. The main reason for doing this is that in
+# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales
+# that character winds up in string.whitespace. Respecting
+# string.whitespace in those cases would 1) make textwrap treat 0xa0 the
+# same as any other whitespace char, which is clearly wrong (it's a
+# *non-breaking* space), 2) possibly cause problems with Unicode,
+# since 0xa0 is not in range(128).
+_whitespace = '\t\n\x0b\x0c\r '
+
+class TextWrapper:
+ """
+ Object for wrapping/filling text. The public interface consists of
+ the wrap() and fill() methods; the other methods are just there for
+ subclasses to override in order to tweak the default behaviour.
+ If you want to completely replace the main wrapping algorithm,
+ you'll probably have to override _wrap_chunks().
+
+ Several instance attributes control various aspects of wrapping:
+ width (default: 70)
+ the maximum width of wrapped lines (unless break_long_words
+ is false)
+ initial_indent (default: "")
+ string that will be prepended to the first line of wrapped
+ output. Counts towards the line's width.
+ subsequent_indent (default: "")
+ string that will be prepended to all lines save the first
+ of wrapped output; also counts towards each line's width.
+ expand_tabs (default: true)
+ Expand tabs in input text to spaces before further processing.
+ Each tab will become 1 .. 8 spaces, depending on its position in
+ its line. If false, each tab is treated as a single character.
+ replace_whitespace (default: true)
+ Replace all whitespace characters in the input text by spaces
+ after tab expansion. Note that if expand_tabs is false and
+ replace_whitespace is true, every tab will be converted to a
+ single space!
+ fix_sentence_endings (default: false)
+ Ensure that sentence-ending punctuation is always followed
+ by two spaces. Off by default because the algorithm is
+ (unavoidably) imperfect.
+ break_long_words (default: true)
+ Break words longer than 'width'. If false, those words will not
+ be broken, and some lines might be longer than 'width'.
+ """
+
+ whitespace_trans = string.maketrans(_whitespace, ' ' * len(_whitespace))
+
+ unicode_whitespace_trans = {}
+ try:
+ uspace = eval("ord(u' ')")
+ except SyntaxError:
+ # Python1.5 doesn't understand u'' syntax, in which case we
+ # won't actually use the unicode translation below, so it
+ # doesn't matter what value we put in the table.
+ uspace = ord(' ')
+ for x in map(ord, _whitespace):
+ unicode_whitespace_trans[x] = uspace
+
+ # This funky little regex is just the trick for splitting
+ # text up into word-wrappable chunks. E.g.
+ # "Hello there -- you goof-ball, use the -b option!"
+ # splits into
+ # Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option!
+ # (after stripping out empty strings).
+ try:
+ wordsep_re = re.compile(r'(\s+|' # any whitespace
+ r'[^\s\w]*\w{2,}-(?=\w{2,})|' # hyphenated words
+ r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash
+ except re.error:
+ # Pre-2.0 Python versions don't have the (?<= negative look-behind
+ # assertion. It mostly doesn't matter for the simple input
+ # SCons is going to give it, so just leave it out.
+ wordsep_re = re.compile(r'(\s+|' # any whitespace
+ r'-*\w{2,}-(?=\w{2,}))') # hyphenated words
+
+ # XXX will there be a locale-or-charset-aware version of
+ # string.lowercase in 2.3?
+ sentence_end_re = re.compile(r'[%s]' # lowercase letter
+ r'[\.\!\?]' # sentence-ending punct.
+ r'[\"\']?' # optional end-of-quote
+ % string.lowercase)
+
+
+ def __init__(self,
+ width=70,
+ initial_indent="",
+ subsequent_indent="",
+ expand_tabs=True,
+ replace_whitespace=True,
+ fix_sentence_endings=False,
+ break_long_words=True):
+ self.width = width
+ self.initial_indent = initial_indent
+ self.subsequent_indent = subsequent_indent
+ self.expand_tabs = expand_tabs
+ self.replace_whitespace = replace_whitespace
+ self.fix_sentence_endings = fix_sentence_endings
+ self.break_long_words = break_long_words
+
+
+ # -- Private methods -----------------------------------------------
+ # (possibly useful for subclasses to override)
+
+ def _munge_whitespace(self, text):
+ """_munge_whitespace(text : string) -> string
+
+ Munge whitespace in text: expand tabs and convert all other
+ whitespace characters to spaces. Eg. " foo\tbar\n\nbaz"
+ becomes " foo bar baz".
+ """
+ if self.expand_tabs:
+ text = string.expandtabs(text)
+ if self.replace_whitespace:
+ if type(text) == type(''):
+ text = string.translate(text, self.whitespace_trans)
+ elif isinstance(text, unicode):
+ text = string.translate(text, self.unicode_whitespace_trans)
+ return text
+
+
+ def _split(self, text):
+ """_split(text : string) -> [string]
+
+ Split the text to wrap into indivisible chunks. Chunks are
+ not quite the same as words; see _wrap_chunks() for full
+ details. As an example, the text
+ Look, goof-ball -- use the -b option!
+ breaks into the following chunks:
+ 'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ',
+ 'use', ' ', 'the', ' ', '-b', ' ', 'option!'
+ """
+ chunks = self.wordsep_re.split(text)
+ chunks = filter(None, chunks)
+ return chunks
+
+ def _fix_sentence_endings(self, chunks):
+ """_fix_sentence_endings(chunks : [string])
+
+ Correct for sentence endings buried in 'chunks'. Eg. when the
+ original text contains "... foo.\nBar ...", munge_whitespace()
+ and split() will convert that to [..., "foo.", " ", "Bar", ...]
+ which has one too few spaces; this method simply changes the one
+ space to two.
+ """
+ i = 0
+ pat = self.sentence_end_re
+ while i < len(chunks)-1:
+ if chunks[i+1] == " " and pat.search(chunks[i]):
+ chunks[i+1] = " "
+ i = i + 2
+ else:
+ i = i + 1
+
+ def _handle_long_word(self, chunks, cur_line, cur_len, width):
+ """_handle_long_word(chunks : [string],
+ cur_line : [string],
+ cur_len : int, width : int)
+
+ Handle a chunk of text (most likely a word, not whitespace) that
+ is too long to fit in any line.
+ """
+ space_left = max(width - cur_len, 1)
+
+ # If we're allowed to break long words, then do so: put as much
+ # of the next chunk onto the current line as will fit.
+ if self.break_long_words:
+ cur_line.append(chunks[0][0:space_left])
+ chunks[0] = chunks[0][space_left:]
+
+ # Otherwise, we have to preserve the long word intact. Only add
+ # it to the current line if there's nothing already there --
+ # that minimizes how much we violate the width constraint.
+ elif not cur_line:
+ cur_line.append(chunks.pop(0))
+
+ # If we're not allowed to break long words, and there's already
+ # text on the current line, do nothing. Next time through the
+ # main loop of _wrap_chunks(), we'll wind up here again, but
+ # cur_len will be zero, so the next line will be entirely
+ # devoted to the long word that we can't handle right now.
+
+ def _wrap_chunks(self, chunks):
+ """_wrap_chunks(chunks : [string]) -> [string]
+
+ Wrap a sequence of text chunks and return a list of lines of
+ length 'self.width' or less. (If 'break_long_words' is false,
+ some lines may be longer than this.) Chunks correspond roughly
+ to words and the whitespace between them: each chunk is
+ indivisible (modulo 'break_long_words'), but a line break can
+ come between any two chunks. Chunks should not have internal
+ whitespace; ie. a chunk is either all whitespace or a "word".
+ Whitespace chunks will be removed from the beginning and end of
+ lines, but apart from that whitespace is preserved.
+ """
+ lines = []
+ if self.width <= 0:
+ raise ValueError("invalid width %r (must be > 0)" % self.width)
+
+ while chunks:
+
+ # Start the list of chunks that will make up the current line.
+ # cur_len is just the length of all the chunks in cur_line.
+ cur_line = []
+ cur_len = 0
+
+ # Figure out which static string will prefix this line.
+ if lines:
+ indent = self.subsequent_indent
+ else:
+ indent = self.initial_indent
+
+ # Maximum width for this line.
+ width = self.width - len(indent)
+
+ # First chunk on line is whitespace -- drop it, unless this
+ # is the very beginning of the text (ie. no lines started yet).
+ if string.strip(chunks[0]) == '' and lines:
+ del chunks[0]
+
+ while chunks:
+ l = len(chunks[0])
+
+ # Can at least squeeze this chunk onto the current line.
+ if cur_len + l <= width:
+ cur_line.append(chunks.pop(0))
+ cur_len = cur_len + l
+
+ # Nope, this line is full.
+ else:
+ break
+
+ # The current line is full, and the next chunk is too big to
+ # fit on *any* line (not just this one).
+ if chunks and len(chunks[0]) > width:
+ self._handle_long_word(chunks, cur_line, cur_len, width)
+
+ # If the last chunk on this line is all whitespace, drop it.
+ if cur_line and string.strip(cur_line[-1]) == '':
+ del cur_line[-1]
+
+ # Convert current line back to a string and store it in list
+ # of all lines (return value).
+ if cur_line:
+ lines.append(indent + string.join(cur_line, ''))
+
+ return lines
+
+
+ # -- Public interface ----------------------------------------------
+
+ def wrap(self, text):
+ """wrap(text : string) -> [string]
+
+ Reformat the single paragraph in 'text' so it fits in lines of
+ no more than 'self.width' columns, and return a list of wrapped
+ lines. Tabs in 'text' are expanded with string.expandtabs(),
+ and all other whitespace characters (including newline) are
+ converted to space.
+ """
+ text = self._munge_whitespace(text)
+ indent = self.initial_indent
+ chunks = self._split(text)
+ if self.fix_sentence_endings:
+ self._fix_sentence_endings(chunks)
+ return self._wrap_chunks(chunks)
+
+ def fill(self, text):
+ """fill(text : string) -> string
+
+ Reformat the single paragraph in 'text' to fit in lines of no
+ more than 'self.width' columns, and return a new string
+ containing the entire wrapped paragraph.
+ """
+ return string.join(self.wrap(text), "\n")
+
+
+# -- Convenience interface ---------------------------------------------
+
+def wrap(text, width=70, **kwargs):
+ """Wrap a single paragraph of text, returning a list of wrapped lines.
+
+ Reformat the single paragraph in 'text' so it fits in lines of no
+ more than 'width' columns, and return a list of wrapped lines. By
+ default, tabs in 'text' are expanded with string.expandtabs(), and
+ all other whitespace characters (including newline) are converted to
+ space. See TextWrapper class for available keyword args to customize
+ wrapping behaviour.
+ """
+ kw = kwargs.copy()
+ kw['width'] = width
+ w = apply(TextWrapper, (), kw)
+ return w.wrap(text)
+
+def fill(text, width=70, **kwargs):
+ """Fill a single paragraph of text, returning a new string.
+
+ Reformat the single paragraph in 'text' to fit in lines of no more
+ than 'width' columns, and return a new string containing the entire
+ wrapped paragraph. As with wrap(), tabs are expanded and other
+ whitespace characters converted to space. See TextWrapper class for
+ available keyword args to customize wrapping behaviour.
+ """
+ kw = kwargs.copy()
+ kw['width'] = width
+ w = apply(TextWrapper, (), kw)
+ return w.fill(text)
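A quick sketch of the two convenience entry points, shown with the standard-library textwrap that this file is copied from (the compat copy behaves the same way):

    import textwrap

    text = "The quick brown fox jumps over the lazy dog, twice, for good measure."
    print textwrap.wrap(text, width=28)
    # ['The quick brown fox jumps', 'over the lazy dog, twice,', 'for good measure.']
    print textwrap.fill(text, width=28, initial_indent="  ", subsequent_indent="    ")
    # Same text as one string, indented and joined with newlines.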
+
+
+# -- Loosely related functionality -------------------------------------
+
+def dedent(text):
+ """dedent(text : string) -> string
+
+ Remove any whitespace that can be uniformly removed from the left
+ of every line in `text`.
+
+ This can be used e.g. to make triple-quoted strings line up with
+ the left edge of screen/whatever, while still presenting it in the
+ source code in indented form.
+
+ For example:
+
+ def test():
+ # end first line with \ to avoid the empty line!
+ s = '''\
+ hello
+ world
+ '''
+ print repr(s) # prints ' hello\n world\n '
+ print repr(dedent(s)) # prints 'hello\n world\n'
+ """
+ lines = text.expandtabs().split('\n')
+ margin = None
+ for line in lines:
+ content = line.lstrip()
+ if not content:
+ continue
+ indent = len(line) - len(content)
+ if margin is None:
+ margin = indent
+ else:
+ margin = min(margin, indent)
+
+ if margin is not None and margin > 0:
+ for i in range(len(lines)):
+ lines[i] = lines[i][margin:]
+
+ return string.join(lines, '\n')
diff --git a/tools/scons/scons-local-1.2.0/SCons/compat/builtins.py b/tools/scons/scons-local-1.2.0/SCons/compat/builtins.py
new file mode 100644
index 000000000..8ae38b6ff
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/compat/builtins.py
@@ -0,0 +1,181 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+# Portions of the following are derived from the compat.py file in
+# Twisted, under the following copyright:
+#
+# Copyright (c) 2001-2004 Twisted Matrix Laboratories
+
+__doc__ = """
+Compatibility idioms for __builtin__ names
+
+This module adds names to the __builtin__ module for things that we want
+to use in SCons but which don't show up until later Python versions than
+the earliest ones we support.
+
+This module checks for the following __builtin__ names:
+
+ all()
+ any()
+ bool()
+ dict()
+ True
+ False
+ zip()
+
+Implementations of functions are *NOT* guaranteed to be fully compliant
+with these functions in later versions of Python. We are only concerned
+with adding functionality that we actually use in SCons, so be wary
+if you lift this code for other uses. (That said, making these more
+nearly the same as later, official versions is still a desirable goal,
+we just don't need to be obsessive about it.)
+
+If you're looking at this with pydoc and various names don't show up in
+the FUNCTIONS or DATA output, that means those names are already built in
+to this version of Python and we don't need to add them from this module.
+"""
+
+__revision__ = "src/engine/SCons/compat/builtins.py 3842 2008/12/20 22:59:52 scons"
+
+import __builtin__
+
+try:
+ all
+except NameError:
+ # Pre-2.5 Python has no all() function.
+ def all(iterable):
+ """
+ Returns True if all elements of the iterable are true.
+ """
+ for element in iterable:
+ if not element:
+ return False
+ return True
+ __builtin__.all = all
+ all = all
+
+try:
+ any
+except NameError:
+ # Pre-2.5 Python has no any() function.
+ def any(iterable):
+ """
+ Returns True if any element of the iterable is true.
+ """
+ for element in iterable:
+ if element:
+ return True
+ return False
+ __builtin__.any = any
+ any = any
+
+try:
+ bool
+except NameError:
+ # Pre-2.2 Python has no bool() function.
+ def bool(value):
+ """Demote a value to 0 or 1, depending on its truth value.
+
+ This is not to be confused with types.BooleanType, which is
+ way too hard to duplicate in early Python versions to be
+ worth the trouble.
+ """
+ return not not value
+ __builtin__.bool = bool
+ bool = bool
+
+try:
+ dict
+except NameError:
+ # Pre-2.2 Python has no dict() keyword.
+ def dict(seq=[], **kwargs):
+ """
+ New dictionary initialization.
+ """
+ d = {}
+ for k, v in seq:
+ d[k] = v
+ d.update(kwargs)
+ return d
+ __builtin__.dict = dict
+
+try:
+ False
+except NameError:
+ # Pre-2.2 Python has no False keyword.
+ __builtin__.False = not 1
+ # Assign to False in this module namespace so it shows up in pydoc output.
+ False = False
+
+try:
+ True
+except NameError:
+ # Pre-2.2 Python has no True keyword.
+ __builtin__.True = not 0
+ # Assign to True in this module namespace so it shows up in pydoc output.
+ True = True
+
+try:
+ file
+except NameError:
+ # Pre-2.2 Python has no file() function.
+ __builtin__.file = open
+
+#
+try:
+ zip
+except NameError:
+ # Pre-2.2 Python has no zip() function.
+ def zip(*lists):
+ """
+ Emulates the behavior we need from the built-in zip() function
+ added in Python 2.2.
+
+ Returns a list of tuples, where each tuple contains the i-th
+ element from each of the argument sequences. The returned
+ list is truncated in length to the length of the shortest
+ argument sequence.
+ """
+ result = []
+ for i in xrange(min(map(len, lists))):
+ result.append(tuple(map(lambda l, i=i: l[i], lists)))
+ return result
+ __builtin__.zip = zip
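The emulated zip() keeps the built-in's truncate-to-the-shortest behaviour; a quick check of the semantics (the result is the same whether the shim or the real built-in is in effect):

    print zip([1, 2, 3], "ab")    # [(1, 'a'), (2, 'b')] -- truncated to the shorter input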
+
+
+
+#if sys.version_info[:3] in ((2, 2, 0), (2, 2, 1)):
+# def lstrip(s, c=string.whitespace):
+# while s and s[0] in c:
+# s = s[1:]
+# return s
+# def rstrip(s, c=string.whitespace):
+# while s and s[-1] in c:
+# s = s[:-1]
+# return s
+# def strip(s, c=string.whitespace, l=lstrip, r=rstrip):
+# return l(r(s, c), c)
+#
+# object.__setattr__(str, 'lstrip', lstrip)
+# object.__setattr__(str, 'rstrip', rstrip)
+# object.__setattr__(str, 'strip', strip)
diff --git a/tools/scons/scons-local-1.2.0/SCons/cpp.py b/tools/scons/scons-local-1.2.0/SCons/cpp.py
new file mode 100644
index 000000000..19809560a
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/cpp.py
@@ -0,0 +1,592 @@
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/cpp.py 3842 2008/12/20 22:59:52 scons"
+
+__doc__ = """
+SCons C Pre-Processor module
+"""
+
+# TODO(1.5): remove this import
+# This module doesn't use anything from SCons by name, but we import SCons
+# here to pull in zip() from the SCons.compat layer for early Pythons.
+import SCons
+
+import os
+import re
+import string
+
+#
+# First "subsystem" of regular expressions that we set up:
+#
+# Stuff to turn the C preprocessor directives in a file's contents into
+# a list of tuples that we can process easily.
+#
+
+# A table of regular expressions that fetch the arguments from the rest of
+# a C preprocessor line. Different directives have different arguments
+# that we want to fetch, using the regular expressions to which the lists
+# of preprocessor directives map.
+cpp_lines_dict = {
+ # Fetch the rest of a #if/#elif/#ifdef/#ifndef as one argument,
+ # separated from the keyword by white space.
+ ('if', 'elif', 'ifdef', 'ifndef',)
+ : '\s+(.+)',
+
+ # Fetch the rest of a #import/#include/#include_next line as one
+ # argument, with white space optional.
+ ('import', 'include', 'include_next',)
+ : '\s*(.+)',
+
+ # We don't care what comes after a #else or #endif line.
+ ('else', 'endif',) : '',
+
+ # Fetch three arguments from a #define line:
+ # 1) The #defined keyword.
+ # 2) The optional parentheses and arguments (if it's a function-like
+ # macro, '' if it's not).
+ # 3) The expansion value.
+ ('define',) : '\s+([_A-Za-z][_A-Za-z0-9_]+)(\([^)]*\))?\s*(.*)',
+
+ # Fetch the #undefed keyword from a #undef line.
+ ('undef',) : '\s+([_A-Za-z][A-Za-z0-9_]+)',
+}
+
+# Create a table that maps each individual C preprocessor directive to
+# the corresponding compiled regular expression that fetches the arguments
+# we care about.
+Table = {}
+for op_list, expr in cpp_lines_dict.items():
+ e = re.compile(expr)
+ for op in op_list:
+ Table[op] = e
+del e
+del op
+del op_list
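+# Illustrative example (not part of the original source): applied to the
+# text that follows a #define keyword, the compiled expression yields the
+# three groups described above.
+#
+#   >>> Table['define'].match(' MAX(a, b) ((a) > (b) ? (a) : (b))').groups()
+#   ('MAX', '(a, b)', '((a) > (b) ? (a) : (b))')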
+
+# Create a list of the expressions we'll use to match all of the
+# preprocessor directives. These are the same as the directives
+# themselves *except* that we must use a negative lookahead assertion
+# when matching "if" so it doesn't match the "if" in "ifdef."
+override = {
+ 'if' : 'if(?!def)',
+}
+l = map(lambda x, o=override: o.get(x, x), Table.keys())
+
+
+# Turn the list of expressions into one big honkin' regular expression
+# that will match all the preprocessor lines at once. This will return
+# a list of tuples, one for each preprocessor line. The preprocessor
+# directive will be the first element in each tuple, and the rest of
+# the line will be the second element.
+e = '^\s*#\s*(' + string.join(l, '|') + ')(.*)$'
+
+# And last but not least, compile the expression.
+CPP_Expression = re.compile(e, re.M)
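+# Illustrative example (not part of the original source):
+#
+#   >>> CPP_Expression.findall('#include <stdio.h>\n#define DEBUG 1\n')
+#   [('include', ' <stdio.h>'), ('define', ' DEBUG 1')]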
+
+
+
+
+#
+# Second "subsystem" of regular expressions that we set up:
+#
+# Stuff to translate a C preprocessor expression (as found on a #if or
+# #elif line) into an equivalent Python expression that we can eval().
+#
+
+# A dictionary that maps the C representation of Boolean operators
+# to their Python equivalents.
+CPP_to_Python_Ops_Dict = {
+ '!' : ' not ',
+ '!=' : ' != ',
+ '&&' : ' and ',
+ '||' : ' or ',
+ '?' : ' and ',
+ ':' : ' or ',
+ '\r' : '',
+}
+
+CPP_to_Python_Ops_Sub = lambda m, d=CPP_to_Python_Ops_Dict: d[m.group(0)]
+
+# We have to sort the keys by length so that longer expressions
+# come *before* shorter expressions--in particular, "!=" must
+# come before "!" in the alternation. Without this, the Python
+# re module, as late as version 2.2.2, empirically matches the
+# "!" in "!=" first, instead of finding the longest match.
+# What's up with that?
+l = CPP_to_Python_Ops_Dict.keys()
+l.sort(lambda a, b: cmp(len(b), len(a)))
+
+# Turn the list of keys into one regular expression that will allow us
+# to substitute all of the operators at once.
+expr = string.join(map(re.escape, l), '|')
+
+# ...and compile the expression.
+CPP_to_Python_Ops_Expression = re.compile(expr)
+
+# A separate list of expressions to be evaluated and substituted
+# sequentially, not all at once.
+CPP_to_Python_Eval_List = [
+ ['defined\s+(\w+)', '__dict__.has_key("\\1")'],
+ ['defined\s*\((\w+)\)', '__dict__.has_key("\\1")'],
+ ['/\*.*\*/', ''],
+ ['/\*.*', ''],
+ ['//.*', ''],
+ ['(0x[0-9A-Fa-f]*)[UL]+', '\\1L'],
+]
+
+# Replace the string representations of the regular expressions in the
+# list with compiled versions.
+for l in CPP_to_Python_Eval_List:
+ l[0] = re.compile(l[0])
+
+# Wrap up all of the above into a handy function.
+def CPP_to_Python(s):
+ """
+ Converts a C pre-processor expression into an equivalent
+ Python expression that can be evaluated.
+ """
+ s = CPP_to_Python_Ops_Expression.sub(CPP_to_Python_Ops_Sub, s)
+ for expr, repl in CPP_to_Python_Eval_List:
+ s = expr.sub(repl, s)
+ return s
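+# Illustrative example (not part of the original source):
+#
+#   >>> CPP_to_Python('defined(FOO)&&BAR')
+#   '__dict__.has_key("FOO") and BAR'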
+
+
+
+del expr
+del l
+del override
+
+
+
+class FunctionEvaluator:
+ """
+ Handles delayed evaluation of a #define function call.
+ """
+ def __init__(self, name, args, expansion):
+ """
+ Squirrels away the arguments and expansion value of a #define
+ macro function for later evaluation when we must actually expand
+ a value that uses it.
+ """
+ self.name = name
+ self.args = function_arg_separator.split(args)
+ try:
+ expansion = string.split(expansion, '##')
+ except (AttributeError, TypeError):
+ # Python 1.5 throws TypeError if "expansion" isn't a string,
+ # later versions throw AttributeError.
+ pass
+ self.expansion = expansion
+ def __call__(self, *values):
+ """
+ Evaluates the expansion of a #define macro function called
+ with the specified values.
+ """
+ if len(self.args) != len(values):
+ raise ValueError, "Incorrect number of arguments to `%s'" % self.name
+ # Create a dictionary that maps the macro arguments to the
+ # corresponding values in this "call." We'll use this when we
+ # eval() the expansion so that arguments will get expanded to
+ # the right values.
+ locals = {}
+ for k, v in zip(self.args, values):
+ locals[k] = v
+
+ parts = []
+ for s in self.expansion:
+ if not s in self.args:
+ s = repr(s)
+ parts.append(s)
+ statement = string.join(parts, ' + ')
+
+ return eval(statement, globals(), locals)
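+# Illustrative example (not part of the original source; the macro below is
+# made up): given '#define CAT(x) x##_suffix', calling the evaluator
+# substitutes the argument and concatenates the pieces.
+#
+#   >>> FunctionEvaluator('CAT', 'x', 'x##_suffix')('foo')
+#   'foo_suffix'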
+
+
+
+# Find line continuations.
+line_continuations = re.compile('\\\\\r?\n')
+
+# Search for a "function call" macro on an expansion. Returns the
+# two-tuple of the "function" name itself, and a string containing the
+# arguments within the call parentheses.
+function_name = re.compile('(\S+)\(([^)]*)\)')
+
+# Split a string containing comma-separated function call arguments into
+# the separate arguments.
+function_arg_separator = re.compile(',\s*')
+
+
+
+class PreProcessor:
+ """
+ The main workhorse class for handling C pre-processing.
+ """
+ def __init__(self, current=os.curdir, cpppath=(), dict={}, all=0):
+ global Table
+
+ cpppath = tuple(cpppath)
+
+ self.searchpath = {
+ '"' : (current,) + cpppath,
+ '<' : cpppath + (current,),
+ }
+
+ # Initialize our C preprocessor namespace for tracking the
+ # values of #defined keywords. We use this namespace to look
+ # for keywords on #ifdef/#ifndef lines, and to eval() the
+ # expressions on #if/#elif lines (after massaging them from C to
+ # Python).
+ self.cpp_namespace = dict.copy()
+ self.cpp_namespace['__dict__'] = self.cpp_namespace
+
+ if all:
+ self.do_include = self.all_include
+
+ # For efficiency, a dispatch table maps each C preprocessor
+ # directive (#if, #define, etc.) to the method that should be
+ # called when we see it. We accommodate state changes (#if,
+ # #ifdef, #ifndef) by pushing the current dispatch table on a
+ # stack and changing what method gets called for each relevant
+ # directive we might see next at this level (#else, #elif).
+ # #endif will simply pop the stack.
+ d = {
+ 'scons_current_file' : self.scons_current_file
+ }
+ for op in Table.keys():
+ d[op] = getattr(self, 'do_' + op)
+ self.default_table = d
+
+ # Controlling methods.
+
+ def tupleize(self, contents):
+ """
+ Turns the contents of a file into a list of easily-processed
+ tuples describing the CPP lines in the file.
+
+ The first element of each tuple is the line's preprocessor
+ directive (#if, #include, #define, etc., minus the initial '#').
+ The remaining elements are specific to the type of directive, as
+ pulled apart by the regular expression.
+ """
+ global CPP_Expression, Table
+ contents = line_continuations.sub('', contents)
+ cpp_tuples = CPP_Expression.findall(contents)
+ return map(lambda m, t=Table:
+ (m[0],) + t[m[0]].match(m[1]).groups(),
+ cpp_tuples)
+
+ def __call__(self, file):
+ """
+ Pre-processes a file.
+
+ This is the main public entry point.
+ """
+ self.current_file = file
+ return self.process_contents(self.read_file(file), file)
+
+ def process_contents(self, contents, fname=None):
+ """
+ Pre-processes a file's contents.
+
+ This is the main internal entry point.
+ """
+ self.stack = []
+ self.dispatch_table = self.default_table.copy()
+ self.current_file = fname
+ self.tuples = self.tupleize(contents)
+
+ self.initialize_result(fname)
+ while self.tuples:
+ t = self.tuples.pop(0)
+ # Uncomment to see the list of tuples being processed (e.g.,
+ # to validate the CPP lines are being translated correctly).
+ #print t
+ self.dispatch_table[t[0]](t)
+ return self.finalize_result(fname)
+
+ # Dispatch table stack manipulation methods.
+
+ def save(self):
+ """
+ Pushes the current dispatch table on the stack and re-initializes
+ the current dispatch table to the default.
+ """
+ self.stack.append(self.dispatch_table)
+ self.dispatch_table = self.default_table.copy()
+
+ def restore(self):
+ """
+ Pops the previous dispatch table off the stack and makes it the
+ current one.
+ """
+ try: self.dispatch_table = self.stack.pop()
+ except IndexError: pass
+
+ # Utility methods.
+
+ def do_nothing(self, t):
+ """
+ Null method for when we explicitly want the action for a
+ specific preprocessor directive to do nothing.
+ """
+ pass
+
+ def scons_current_file(self, t):
+ self.current_file = t[1]
+
+ def eval_expression(self, t):
+ """
+ Evaluates a C preprocessor expression.
+
+ This is done by converting it to a Python equivalent and
+ eval()ing it in the C preprocessor namespace we use to
+ track #define values.
+ """
+ t = CPP_to_Python(string.join(t[1:]))
+ try: return eval(t, self.cpp_namespace)
+ except (NameError, TypeError): return 0
+
+ def initialize_result(self, fname):
+ self.result = [fname]
+
+ def finalize_result(self, fname):
+ return self.result[1:]
+
+ def find_include_file(self, t):
+ """
+ Finds the #include file for a given preprocessor tuple.
+ """
+ fname = t[2]
+ for d in self.searchpath[t[1]]:
+ if d == os.curdir:
+ f = fname
+ else:
+ f = os.path.join(d, fname)
+ if os.path.isfile(f):
+ return f
+ return None
+
+ def read_file(self, file):
+ return open(file).read()
+
+ # Start and stop processing include lines.
+
+ def start_handling_includes(self, t=None):
+ """
+ Causes the PreProcessor object to start processing #import,
+ #include and #include_next lines.
+
+ This method will be called when a #if, #ifdef, #ifndef or #elif
+ evaluates True, or when we reach the #else in a #if, #ifdef,
+ #ifndef or #elif block where a condition already evaluated
+ False.
+
+ """
+ d = self.dispatch_table
+ d['import'] = self.do_import
+ d['include'] = self.do_include
+ d['include_next'] = self.do_include
+
+ def stop_handling_includes(self, t=None):
+ """
+ Causes the PreProcessor object to stop processing #import,
+ #include and #include_next lines.
+
+ This method will be called when a #if, #ifdef, #ifndef or #elif
+ evaluates False, or when we reach the #else in a #if, #ifdef,
+ #ifndef or #elif block where a condition already evaluated True.
+ """
+ d = self.dispatch_table
+ d['import'] = self.do_nothing
+ d['include'] = self.do_nothing
+ d['include_next'] = self.do_nothing
+
+ # Default methods for handling all of the preprocessor directives.
+ # (Note that what actually gets called for a given directive at any
+ # point in time is really controlled by the dispatch_table.)
+
+ def _do_if_else_condition(self, condition):
+ """
+ Common logic for evaluating the conditions on #if, #ifdef and
+ #ifndef lines.
+ """
+ self.save()
+ d = self.dispatch_table
+ if condition:
+ self.start_handling_includes()
+ d['elif'] = self.stop_handling_includes
+ d['else'] = self.stop_handling_includes
+ else:
+ self.stop_handling_includes()
+ d['elif'] = self.do_elif
+ d['else'] = self.start_handling_includes
+
+ def do_ifdef(self, t):
+ """
+ Default handling of a #ifdef line.
+ """
+ self._do_if_else_condition(self.cpp_namespace.has_key(t[1]))
+
+ def do_ifndef(self, t):
+ """
+ Default handling of a #ifndef line.
+ """
+ self._do_if_else_condition(not self.cpp_namespace.has_key(t[1]))
+
+ def do_if(self, t):
+ """
+ Default handling of a #if line.
+ """
+ self._do_if_else_condition(self.eval_expression(t))
+
+ def do_elif(self, t):
+ """
+ Default handling of a #elif line.
+ """
+ d = self.dispatch_table
+ if self.eval_expression(t):
+ self.start_handling_includes()
+ d['elif'] = self.stop_handling_includes
+ d['else'] = self.stop_handling_includes
+
+ def do_else(self, t):
+ """
+ Default handling of a #else line.
+ """
+ pass
+
+ def do_endif(self, t):
+ """
+ Default handling of a #endif line.
+ """
+ self.restore()
+
+ def do_define(self, t):
+ """
+ Default handling of a #define line.
+ """
+ _, name, args, expansion = t
+ try:
+ expansion = int(expansion)
+ except (TypeError, ValueError):
+ pass
+ if args:
+ evaluator = FunctionEvaluator(name, args[1:-1], expansion)
+ self.cpp_namespace[name] = evaluator
+ else:
+ self.cpp_namespace[name] = expansion
+
+ def do_undef(self, t):
+ """
+ Default handling of a #undef line.
+ """
+ try: del self.cpp_namespace[t[1]]
+ except KeyError: pass
+
+ def do_import(self, t):
+ """
+ Default handling of a #import line.
+ """
+ # XXX finish this -- maybe borrow/share logic from do_include()...?
+ pass
+
+ def do_include(self, t):
+ """
+ Default handling of a #include line.
+ """
+ t = self.resolve_include(t)
+ include_file = self.find_include_file(t)
+ if include_file:
+ #print "include_file =", include_file
+ self.result.append(include_file)
+ contents = self.read_file(include_file)
+ new_tuples = [('scons_current_file', include_file)] + \
+ self.tupleize(contents) + \
+ [('scons_current_file', self.current_file)]
+ self.tuples[:] = new_tuples + self.tuples
+
+ # Date: Tue, 22 Nov 2005 20:26:09 -0500
+ # From: Stefan Seefeld <seefeld@sympatico.ca>
+ #
+ # By the way, #include_next is not the same as #include. The difference
+ # being that #include_next starts its search in the path following the
+ # path that led to the including file. In other words, if your system
+ # include paths are ['/foo', '/bar'], and you are looking at a header
+ # '/foo/baz.h', it might issue an '#include_next <baz.h>' which would
+ # correctly resolve to '/bar/baz.h' (if that exists), but *not* see
+ # '/foo/baz.h' again. See http://www.delorie.com/gnu/docs/gcc/cpp_11.html
+ # for more reasoning.
+ #
+ # I have no idea in what context 'import' might be used.
+
+ # XXX is #include_next really the same as #include ?
+ do_include_next = do_include
+
+ # Utility methods for handling resolution of include files.
+
+ def resolve_include(self, t):
+ """Resolve a tuple-ized #include line.
+
+ This handles recursive expansion of values without "" or <>
+ surrounding the name until an initial " or < is found, to handle
+ #include FILE
+ where FILE is a #define somewhere else.
+ """
+ s = t[1]
+ while not s[0] in '<"':
+ #print "s =", s
+ try:
+ s = self.cpp_namespace[s]
+ except KeyError:
+ m = function_name.search(s)
+ s = self.cpp_namespace[m.group(1)]
+ if callable(s):
+ args = function_arg_separator.split(m.group(2))
+ s = apply(s, args)
+ if not s:
+ return None
+ return (t[0], s[0], s[1:-1])
+
+ def all_include(self, t):
+ """
+ """
+ self.result.append(self.resolve_include(t))
+
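+# Illustrative usage (a sketch, not part of the original source; the file
+# name, search path and pre-defined DEBUG symbol below are made up):
+#
+#   cpp = PreProcessor(current='.', cpppath=['/usr/include'], dict={'DEBUG': 1})
+#   includes = cpp('main.c')    # list of #include files found on the path
+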
+class DumbPreProcessor(PreProcessor):
+ """A preprocessor that ignores all #if/#elif/#else/#endif directives
+ and just reports back *all* of the #include files (like the classic
+ SCons scanner did).
+
+ This is functionally equivalent to using a regular expression to
+ find all of the #include lines, only slower. It exists mainly as
+ an example of how the main PreProcessor class can be sub-classed
+ to tailor its behavior.
+ """
+ def __init__(self, *args, **kw):
+ apply(PreProcessor.__init__, (self,)+args, kw)
+ d = self.default_table
+ for func in ['if', 'elif', 'else', 'endif', 'ifdef', 'ifndef']:
+ d[func] = self.do_nothing
+
+del __revision__
diff --git a/tools/scons/scons-local-1.2.0/SCons/dblite.py b/tools/scons/scons-local-1.2.0/SCons/dblite.py
new file mode 100644
index 000000000..437f05a37
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/dblite.py
@@ -0,0 +1,219 @@
+# dblite.py module contributed by Ralf W. Grosse-Kunstleve.
+# Extended for Unicode by Steven Knight.
+
+import cPickle
+import time
+import shutil
+import os
+import types
+import __builtin__
+
+keep_all_files = 00000
+ignore_corrupt_dbfiles = 0
+
+def corruption_warning(filename):
+ print "Warning: Discarding corrupt database:", filename
+
+if hasattr(types, 'UnicodeType'):
+ def is_string(s):
+ t = type(s)
+ return t is types.StringType or t is types.UnicodeType
+else:
+ def is_string(s):
+ return type(s) is types.StringType
+
+try:
+ unicode('a')
+except NameError:
+ def unicode(s): return s
+
+dblite_suffix = '.dblite'
+tmp_suffix = '.tmp'
+
+class dblite:
+
+ # Squirrel away references to the functions in various modules
+ # that we'll use when our __del__() method calls our sync() method
+ # during shutdown. We might get destroyed when Python is in the midst
+ # of tearing down the different modules we import in an essentially
+ # arbitrary order, and some of the various modules's global attributes
+ # may already be wiped out from under us.
+ #
+ # See the discussion at:
+ # http://mail.python.org/pipermail/python-bugs-list/2003-March/016877.html
+
+ _open = __builtin__.open
+ _cPickle_dump = cPickle.dump
+ _os_chmod = os.chmod
+ _os_rename = os.rename
+ _os_unlink = os.unlink
+ _shutil_copyfile = shutil.copyfile
+ _time_time = time.time
+
+ def __init__(self, file_base_name, flag, mode):
+ assert flag in (None, "r", "w", "c", "n")
+ if (flag is None): flag = "r"
+ base, ext = os.path.splitext(file_base_name)
+ if ext == dblite_suffix:
+ # There's already a suffix on the file name, don't add one.
+ self._file_name = file_base_name
+ self._tmp_name = base + tmp_suffix
+ else:
+ self._file_name = file_base_name + dblite_suffix
+ self._tmp_name = file_base_name + tmp_suffix
+ self._flag = flag
+ self._mode = mode
+ self._dict = {}
+ self._needs_sync = 00000
+ if (self._flag == "n"):
+ self._open(self._file_name, "wb", self._mode)
+ else:
+ try:
+ f = self._open(self._file_name, "rb")
+ except IOError, e:
+ if (self._flag != "c"):
+ raise e
+ self._open(self._file_name, "wb", self._mode)
+ else:
+ p = f.read()
+ if (len(p) > 0):
+ try:
+ self._dict = cPickle.loads(p)
+ except (cPickle.UnpicklingError, EOFError):
+ if (ignore_corrupt_dbfiles == 0): raise
+ if (ignore_corrupt_dbfiles == 1):
+ corruption_warning(self._file_name)
+
+ def __del__(self):
+ if (self._needs_sync):
+ self.sync()
+
+ def sync(self):
+ self._check_writable()
+ f = self._open(self._tmp_name, "wb", self._mode)
+ self._cPickle_dump(self._dict, f, 1)
+ f.close()
+ # Windows doesn't allow renaming if the file exists, so unlink
+ # it first, chmod'ing it to make sure we can do so. On UNIX, we
+ # may not be able to chmod the file if it's owned by someone else
+ # (e.g. from a previous run as root). We should still be able to
+ # unlink() the file if the directory's writable, though, so ignore
+ # any OSError exception thrown by the chmod() call.
+ try: self._os_chmod(self._file_name, 0777)
+ except OSError: pass
+ self._os_unlink(self._file_name)
+ self._os_rename(self._tmp_name, self._file_name)
+ self._needs_sync = 00000
+ if (keep_all_files):
+ self._shutil_copyfile(
+ self._file_name,
+ self._file_name + "_" + str(int(self._time_time())))
+
+ def _check_writable(self):
+ if (self._flag == "r"):
+ raise IOError("Read-only database: %s" % self._file_name)
+
+ def __getitem__(self, key):
+ return self._dict[key]
+
+ def __setitem__(self, key, value):
+ self._check_writable()
+ if (not is_string(key)):
+ raise TypeError, "key `%s' must be a string but is %s" % (key, type(key))
+ if (not is_string(value)):
+ raise TypeError, "value `%s' must be a string but is %s" % (value, type(value))
+ self._dict[key] = value
+ self._needs_sync = 0001
+
+ def keys(self):
+ return self._dict.keys()
+
+ def has_key(self, key):
+ return key in self._dict
+
+ def __contains__(self, key):
+ return key in self._dict
+
+ def iterkeys(self):
+ return self._dict.iterkeys()
+
+ __iter__ = iterkeys
+
+ def __len__(self):
+ return len(self._dict)
+
+def open(file, flag=None, mode=0666):
+ return dblite(file, flag, mode)
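+# Illustrative usage (a sketch, not part of the original source; the file
+# name is made up):
+#
+#   db = open("signatures", "c")   # opens/creates signatures.dblite
+#   db["key"] = "value"            # keys and values must be strings
+#   db.sync()                      # pickles the dictionary out to disk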
+
+def _exercise():
+ db = open("tmp", "n")
+ assert len(db) == 0
+ db["foo"] = "bar"
+ assert db["foo"] == "bar"
+ db[unicode("ufoo")] = unicode("ubar")
+ assert db[unicode("ufoo")] == unicode("ubar")
+ db.sync()
+ db = open("tmp", "c")
+ assert len(db) == 2, len(db)
+ assert db["foo"] == "bar"
+ db["bar"] = "foo"
+ assert db["bar"] == "foo"
+ db[unicode("ubar")] = unicode("ufoo")
+ assert db[unicode("ubar")] == unicode("ufoo")
+ db.sync()
+ db = open("tmp", "r")
+ assert len(db) == 4, len(db)
+ assert db["foo"] == "bar"
+ assert db["bar"] == "foo"
+ assert db[unicode("ufoo")] == unicode("ubar")
+ assert db[unicode("ubar")] == unicode("ufoo")
+ try:
+ db.sync()
+ except IOError, e:
+ assert str(e) == "Read-only database: tmp.dblite"
+ else:
+ raise RuntimeError, "IOError expected."
+ db = open("tmp", "w")
+ assert len(db) == 4
+ db["ping"] = "pong"
+ db.sync()
+ try:
+ db[(1,2)] = "tuple"
+ except TypeError, e:
+ assert str(e) == "key `(1, 2)' must be a string but is <type 'tuple'>", str(e)
+ else:
+ raise RuntimeError, "TypeError exception expected"
+ try:
+ db["list"] = [1,2]
+ except TypeError, e:
+ assert str(e) == "value `[1, 2]' must be a string but is <type 'list'>", str(e)
+ else:
+ raise RuntimeError, "TypeError exception expected"
+ db = open("tmp", "r")
+ assert len(db) == 5
+ db = open("tmp", "n")
+ assert len(db) == 0
+ _open("tmp.dblite", "w")
+ db = open("tmp", "r")
+ _open("tmp.dblite", "w").write("x")
+ try:
+ db = open("tmp", "r")
+ except cPickle.UnpicklingError:
+ pass
+ else:
+ raise RuntimeError, "cPickle exception expected."
+ global ignore_corrupt_dbfiles
+ ignore_corrupt_dbfiles = 2
+ db = open("tmp", "r")
+ assert len(db) == 0
+ os.unlink("tmp.dblite")
+ try:
+ db = open("tmp", "w")
+ except IOError, e:
+ assert str(e) == "[Errno 2] No such file or directory: 'tmp.dblite'", str(e)
+ else:
+ raise RuntimeError, "IOError expected."
+ print "OK"
+
+if (__name__ == "__main__"):
+ _exercise()
diff --git a/tools/scons/scons-local-1.2.0/SCons/exitfuncs.py b/tools/scons/scons-local-1.2.0/SCons/exitfuncs.py
new file mode 100644
index 000000000..2feb86c94
--- /dev/null
+++ b/tools/scons/scons-local-1.2.0/SCons/exitfuncs.py
@@ -0,0 +1,71 @@
+"""SCons.exitfuncs
+
+Register functions which are executed when SCons exits for any reason.
+
+"""
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/engine/SCons/exitfuncs.py 3842 2008/12/20 22:59:52 scons"
+
+
+
+_exithandlers = []
+def _run_exitfuncs():
+ """run any registered exit functions
+
+ _exithandlers is traversed in reverse order so functions are executed
+ last in, first out.
+ """
+
+ while _exithandlers:
+ func, targs, kargs = _exithandlers.pop()
+ apply(func, targs, kargs)
+
+def register(func, *targs, **kargs):
+ """register a function to be executed upon normal program termination
+
+ func - function to be called at exit
+ targs - optional arguments to pass to func
+ kargs - optional keyword arguments to pass to func
+ """
+ _exithandlers.append((func, targs, kargs))
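+# Illustrative usage (not part of the original source):
+#
+#   def say_goodbye(name):
+#       print "goodbye,", name
+#   register(say_goodbye, "world")   # runs when the program exits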
+
+import sys
+
+try:
+ x = sys.exitfunc
+
+ # if x isn't our own exit func executive, assume it's another
+ # registered exit function - append it to our list...
+ if x != _run_exitfuncs:
+ register(x)
+
+except AttributeError:
+ pass
+
+# make our exit function get run by python when it exits:
+sys.exitfunc = _run_exitfuncs
+
+del sys
diff --git a/tools/scons/scons-time.py b/tools/scons/scons-time.py
new file mode 100755
index 000000000..67b6643bf
--- /dev/null
+++ b/tools/scons/scons-time.py
@@ -0,0 +1,1513 @@
+#!/usr/bin/env python
+#
+# scons-time - run SCons timings and collect statistics
+#
+# A script for running a configuration through SCons with a standard
+# set of invocations to collect timing and memory statistics and to
+# capture the results in a consistent set of output files for display
+# and analysis.
+#
+
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+from __future__ import nested_scopes
+
+__revision__ = "src/script/scons-time.py 3842 2008/12/20 22:59:52 scons"
+
+import getopt
+import glob
+import os
+import os.path
+import re
+import shutil
+import string
+import sys
+import tempfile
+import time
+
+try:
+ False
+except NameError:
+ # Pre-2.2 Python has no False keyword.
+ import __builtin__
+ __builtin__.False = not 1
+
+try:
+ True
+except NameError:
+ # Pre-2.2 Python has no True keyword.
+ import __builtin__
+ __builtin__.True = not 0
+
+def make_temp_file(**kw):
+ try:
+ result = tempfile.mktemp(**kw)
+ try:
+ result = os.path.realpath(result)
+ except AttributeError:
+ # Python 2.1 has no os.path.realpath() method.
+ pass
+ except TypeError:
+ try:
+ save_template = tempfile.template
+ prefix = kw['prefix']
+ del kw['prefix']
+ tempfile.template = prefix
+ result = tempfile.mktemp(**kw)
+ finally:
+ tempfile.template = save_template
+ return result
+
+class Plotter:
+ def increment_size(self, largest):
+ """
+ Return the size of each horizontal increment line for a specified
+ maximum value. This returns a value that will provide somewhere
+ between 5 and 9 horizontal lines on the graph, on some set of
+ boundaries that are multiples of 10/100/1000/etc.
+ """
+ i = largest / 5
+ if not i:
+ return largest
+ multiplier = 1
+ while i >= 10:
+ i = i / 10
+ multiplier = multiplier * 10
+ return i * multiplier
+
+ def max_graph_value(self, largest):
+ # Round up to next integer.
+ largest = int(largest) + 1
+ increment = self.increment_size(largest)
+ return ((largest + increment - 1) / increment) * increment
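+ # Illustrative examples (not part of the original source):
+ #
+ #   >>> Plotter().increment_size(137)
+ #   20
+ #   >>> Plotter().max_graph_value(137)
+ #   140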
+
+class Line:
+ def __init__(self, points, type, title, label, comment, fmt="%s %s"):
+ self.points = points
+ self.type = type
+ self.title = title
+ self.label = label
+ self.comment = comment
+ self.fmt = fmt
+
+ def print_label(self, inx, x, y):
+ if self.label:
+ print 'set label %s "%s" at %s,%s right' % (inx, self.label, x, y)
+
+ def plot_string(self):
+ if self.title:
+ title_string = 'title "%s"' % self.title
+ else:
+ title_string = 'notitle'
+ return "'-' %s with lines lt %s" % (title_string, self.type)
+
+ def print_points(self, fmt=None):
+ if fmt is None:
+ fmt = self.fmt
+ if self.comment:
+ print '# %s' % self.comment
+ for x, y in self.points:
+ # If y is None, it usually represents some kind of break
+ # in the line's index number. We might want to represent
+ # this some way rather than just drawing the line straight
+ # between the two points on either side.
+ if not y is None:
+ print fmt % (x, y)
+ print 'e'
+
+ def get_x_values(self):
+ return [ p[0] for p in self.points ]
+
+ def get_y_values(self):
+ return [ p[1] for p in self.points ]
+
+class Gnuplotter(Plotter):
+
+ def __init__(self, title, key_location):
+ self.lines = []
+ self.title = title
+ self.key_location = key_location
+
+ def line(self, points, type, title=None, label=None, comment=None, fmt='%s %s'):
+ if points:
+ line = Line(points, type, title, label, comment, fmt)
+ self.lines.append(line)
+
+ def plot_string(self, line):
+ return line.plot_string()
+
+ def vertical_bar(self, x, type, label, comment):
+ if self.get_min_x() <= x and x <= self.get_max_x():
+ points = [(x, 0), (x, self.max_graph_value(self.get_max_y()))]
+ self.line(points, type, label, comment)
+
+ def get_all_x_values(self):
+ result = []
+ for line in self.lines:
+ result.extend(line.get_x_values())
+ return filter(lambda r: not r is None, result)
+
+ def get_all_y_values(self):
+ result = []
+ for line in self.lines:
+ result.extend(line.get_y_values())
+ return filter(lambda r: not r is None, result)
+
+ def get_min_x(self):
+ try:
+ return self.min_x
+ except AttributeError:
+ try:
+ self.min_x = min(self.get_all_x_values())
+ except ValueError:
+ self.min_x = 0
+ return self.min_x
+
+ def get_max_x(self):
+ try:
+ return self.max_x
+ except AttributeError:
+ try:
+ self.max_x = max(self.get_all_x_values())
+ except ValueError:
+ self.max_x = 0
+ return self.max_x
+
+ def get_min_y(self):
+ try:
+ return self.min_y
+ except AttributeError:
+ try:
+ self.min_y = min(self.get_all_y_values())
+ except ValueError:
+ self.min_y = 0
+ return self.min_y
+
+ def get_max_y(self):
+ try:
+ return self.max_y
+ except AttributeError:
+ try:
+ self.max_y = max(self.get_all_y_values())
+ except ValueError:
+ self.max_y = 0
+ return self.max_y
+
+ def draw(self):
+
+ if not self.lines:
+ return
+
+ if self.title:
+ print 'set title "%s"' % self.title
+ print 'set key %s' % self.key_location
+
+ min_y = self.get_min_y()
+ max_y = self.max_graph_value(self.get_max_y())
+ range = max_y - min_y
+ incr = range / 10.0
+ start = min_y + (max_y / 2.0) + (2.0 * incr)
+ position = [ start - (i * incr) for i in xrange(5) ]
+
+ inx = 1
+ for line in self.lines:
+ line.print_label(inx, line.points[0][0]-1,
+ position[(inx-1) % len(position)])
+ inx += 1
+
+ plot_strings = [ self.plot_string(l) for l in self.lines ]
+ print 'plot ' + ', \\\n '.join(plot_strings)
+
+ for line in self.lines:
+ line.print_points()
+
+
+
+def untar(fname):
+ import tarfile
+ tar = tarfile.open(name=fname, mode='r')
+ for tarinfo in tar:
+ tar.extract(tarinfo)
+ tar.close()
+
+def unzip(fname):
+ import zipfile
+ zf = zipfile.ZipFile(fname, 'r')
+ for name in zf.namelist():
+ dir = os.path.dirname(name)
+ try:
+ os.makedirs(dir)
+ except:
+ pass
+ open(name, 'w').write(zf.read(name))
+
+def read_tree(dir):
+ def read_files(arg, dirname, fnames):
+ for fn in fnames:
+ fn = os.path.join(dirname, fn)
+ if os.path.isfile(fn):
+ open(fn, 'rb').read()
+ os.path.walk('.', read_files, None)
+
+def redirect_to_file(command, log):
+ return '%s > %s 2>&1' % (command, log)
+
+def tee_to_file(command, log):
+ return '%s 2>&1 | tee %s' % (command, log)
+
+
+
+class SConsTimer:
+ """
+ Usage: scons-time SUBCOMMAND [ARGUMENTS]
+ Type "scons-time help SUBCOMMAND" for help on a specific subcommand.
+
+ Available subcommands:
+ func Extract test-run data for a function
+ help Provides help
+ mem Extract --debug=memory data from test runs
+ obj Extract --debug=count data from test runs
+ time Extract --debug=time data from test runs
+ run Runs a test configuration
+ """
+
+ name = 'scons-time'
+ name_spaces = ' '*len(name)
+
+ def makedict(**kw):
+ return kw
+
+ default_settings = makedict(
+ aegis = 'aegis',
+ aegis_project = None,
+ chdir = None,
+ config_file = None,
+ initial_commands = [],
+ key_location = 'bottom left',
+ orig_cwd = os.getcwd(),
+ outdir = None,
+ prefix = '',
+ python = '"%s"' % sys.executable,
+ redirect = redirect_to_file,
+ scons = None,
+ scons_flags = '--debug=count --debug=memory --debug=time --debug=memoizer',
+ scons_lib_dir = None,
+ scons_wrapper = None,
+ startup_targets = '--help',
+ subdir = None,
+ subversion_url = None,
+ svn = 'svn',
+ svn_co_flag = '-q',
+ tar = 'tar',
+ targets = '',
+ targets0 = None,
+ targets1 = None,
+ targets2 = None,
+ title = None,
+ unzip = 'unzip',
+ verbose = False,
+ vertical_bars = [],
+
+ unpack_map = {
+ '.tar.gz' : (untar, '%(tar)s xzf %%s'),
+ '.tgz' : (untar, '%(tar)s xzf %%s'),
+ '.tar' : (untar, '%(tar)s xf %%s'),
+ '.zip' : (unzip, '%(unzip)s %%s'),
+ },
+ )
+
+ run_titles = [
+ 'Startup',
+ 'Full build',
+ 'Up-to-date build',
+ ]
+
+ run_commands = [
+ '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof0)s %(targets0)s',
+ '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof1)s %(targets1)s',
+ '%(python)s %(scons_wrapper)s %(scons_flags)s --profile=%(prof2)s %(targets2)s',
+ ]
+
+ stages = [
+ 'pre-read',
+ 'post-read',
+ 'pre-build',
+ 'post-build',
+ ]
+
+ stage_strings = {
+ 'pre-read' : 'Memory before reading SConscript files:',
+ 'post-read' : 'Memory after reading SConscript files:',
+ 'pre-build' : 'Memory before building targets:',
+ 'post-build' : 'Memory after building targets:',
+ }
+
+ memory_string_all = 'Memory '
+
+ default_stage = stages[-1]
+
+ time_strings = {
+ 'total' : 'Total build time',
+ 'SConscripts' : 'Total SConscript file execution time',
+ 'SCons' : 'Total SCons execution time',
+ 'commands' : 'Total command execution time',
+ }
+
+ time_string_all = 'Total .* time'
+
+ #
+
+ def __init__(self):
+ self.__dict__.update(self.default_settings)
+
+ # Functions for displaying and executing commands.
+
+ def subst(self, x, dictionary):
+ try:
+ return x % dictionary
+ except TypeError:
+ # x isn't a string (it's probably a Python function),
+ # so just return it.
+ return x
+
+ def subst_variables(self, command, dictionary):
+ """
+ Substitutes (via the format operator) the values in the specified
+ dictionary into the specified command.
+
+ The command can be an (action, string) tuple. In all cases, we
+ perform substitution on strings and don't worry if something isn't
+ a string. (It's probably a Python function to be executed.)
+ """
+ try:
+ command + ''
+ except TypeError:
+ action = command[0]
+ string = command[1]
+ args = command[2:]
+ else:
+ action = command
+ string = action
+ args = (())
+ action = self.subst(action, dictionary)
+ string = self.subst(string, dictionary)
+ return (action, string, args)
+
+ def _do_not_display(self, msg, *args):
+ pass
+
+ def display(self, msg, *args):
+ """
+ Displays the specified message.
+
+ Each message is prepended with a standard prefix of our name
+ plus the time.
+ """
+ if callable(msg):
+ msg = msg(*args)
+ else:
+ msg = msg % args
+ if msg is None:
+ return
+ fmt = '%s[%s]: %s\n'
+ sys.stdout.write(fmt % (self.name, time.strftime('%H:%M:%S'), msg))
+
+ def _do_not_execute(self, action, *args):
+ pass
+
+ def execute(self, action, *args):
+ """
+ Executes the specified action.
+
+ The action is called if it's a callable Python function, and
+ otherwise passed to os.system().
+ """
+ if callable(action):
+ action(*args)
+ else:
+ os.system(action % args)
+
+ def run_command_list(self, commands, dict):
+ """
+ Executes a list of commands, substituting values from the
+ specified dictionary.
+ """
+ commands = [ self.subst_variables(c, dict) for c in commands ]
+ for action, string, args in commands:
+ self.display(string, *args)
+ sys.stdout.flush()
+ status = self.execute(action, *args)
+ if status:
+ sys.exit(status)
+
+ def log_display(self, command, log):
+ command = self.subst(command, self.__dict__)
+ if log:
+ command = self.redirect(command, log)
+ return command
+
+ def log_execute(self, command, log):
+ command = self.subst(command, self.__dict__)
+ output = os.popen(command).read()
+ if self.verbose:
+ sys.stdout.write(output)
+ open(log, 'wb').write(output)
+
+ #
+
+ def archive_splitext(self, path):
+ """
+ Splits an archive name into a filename base and extension.
+
+ This is like os.path.splitext() (which it calls) except that it
+ also looks for '.tar.gz' and treats it as an atomic extension.
+ """
+ if path.endswith('.tar.gz'):
+ return path[:-7], path[-7:]
+ else:
+ return os.path.splitext(path)
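+ # Illustrative example (not part of the original source; the file name
+ # is made up):
+ #
+ #   >>> SConsTimer().archive_splitext('scons-src-1.2.0.tar.gz')
+ #   ('scons-src-1.2.0', '.tar.gz')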
+
+ def args_to_files(self, args, tail=None):
+ """
+ Takes a list of arguments, expands any glob patterns, and
+ returns the last "tail" files from the list.
+ """
+ files = []
+ for a in args:
+ g = glob.glob(a)
+ g.sort()
+ files.extend(g)
+
+ if tail:
+ files = files[-tail:]
+
+ return files
+
+ def ascii_table(self, files, columns,
+ line_function, file_function=lambda x: x,
+ *args, **kw):
+
+ header_fmt = ' '.join(['%12s'] * len(columns))
+ line_fmt = header_fmt + ' %s'
+
+ print header_fmt % columns
+
+ for file in files:
+ t = line_function(file, *args, **kw)
+ if t is None:
+ t = []
+ diff = len(columns) - len(t)
+ if diff > 0:
+ t += [''] * diff
+ t.append(file_function(file))
+ print line_fmt % tuple(t)
+
+ def collect_results(self, files, function, *args, **kw):
+ results = {}
+
+ for file in files:
+ base = os.path.splitext(file)[0]
+ run, index = string.split(base, '-')[-2:]
+
+ run = int(run)
+ index = int(index)
+
+ value = function(file, *args, **kw)
+
+ try:
+ r = results[index]
+ except KeyError:
+ r = []
+ results[index] = r
+ r.append((run, value))
+
+ return results
+
+ def doc_to_help(self, obj):
+ """
+ Translates an object's __doc__ string into help text.
+
+ This strips a consistent number of spaces from each line in the
+ help text, essentially "outdenting" the text to the left-most
+ column.
+ """
+ doc = obj.__doc__
+ if doc is None:
+ return ''
+ return self.outdent(doc)
+
+ def find_next_run_number(self, dir, prefix):
+ """
+ Returns the next run number in a directory for the specified prefix.
+
+ Examines the contents of the specified directory for files with the
+ specified prefix, extracts the run numbers from each file name,
+ and returns the next run number after the largest it finds.
+ """
+ x = re.compile(re.escape(prefix) + '-([0-9]+).*')
+ matches = map(lambda e, x=x: x.match(e), os.listdir(dir))
+ matches = filter(None, matches)
+ if not matches:
+ return 0
+ run_numbers = map(lambda m: int(m.group(1)), matches)
+ return int(max(run_numbers)) + 1
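+ # Illustrative example (not part of the original source; the file names
+ # are made up): with foo-000.log and foo-001.prof present in DIR,
+ # find_next_run_number(DIR, 'foo') returns 2; with no matches it returns 0.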
+
+ def gnuplot_results(self, results, fmt='%s %.3f'):
+ """
+ Prints out a set of results in Gnuplot format.
+ """
+ gp = Gnuplotter(self.title, self.key_location)
+
+ indices = results.keys()
+ indices.sort()
+
+ for i in indices:
+ try:
+ t = self.run_titles[i]
+ except IndexError:
+ t = '??? %s ???' % i
+ results[i].sort()
+ gp.line(results[i], i+1, t, None, t, fmt=fmt)
+
+ for bar_tuple in self.vertical_bars:
+ try:
+ x, type, label, comment = bar_tuple
+ except ValueError:
+ x, type, label = bar_tuple
+ comment = label
+ gp.vertical_bar(x, type, label, comment)
+
+ gp.draw()
+
+ def logfile_name(self, invocation):
+ """
+ Returns the absolute path of a log file for the specified
+ invocation number.
+ """
+ name = self.prefix_run + '-%d.log' % invocation
+ return os.path.join(self.outdir, name)
+
+ def outdent(self, s):
+ """
+ Strip as many spaces from each line as are found at the beginning
+ of the first line in the list.
+ """
+ lines = s.split('\n')
+ if lines[0] == '':
+ lines = lines[1:]
+ spaces = re.match(' *', lines[0]).group(0)
+ def strip_initial_spaces(l, s=spaces):
+ if l.startswith(spaces):
+ l = l[len(spaces):]
+ return l
+ return '\n'.join([ strip_initial_spaces(l) for l in lines ]) + '\n'
+
+ def profile_name(self, invocation):
+ """
+ Returns the absolute path of a profile file for the specified
+ invocation number.
+ """
+ name = self.prefix_run + '-%d.prof' % invocation
+ return os.path.join(self.outdir, name)
+
+ def set_env(self, key, value):
+ os.environ[key] = value
+
+ #
+
+ def get_debug_times(self, file, time_string=None):
+ """
+ Fetch times from the --debug=time strings in the specified file.
+ """
+ if time_string is None:
+ search_string = self.time_string_all
+ else:
+ search_string = time_string
+ contents = open(file).read()
+ if not contents:
+ sys.stderr.write('file %s has no contents!\n' % repr(file))
+ return None
+ result = re.findall(r'%s: ([\d\.]*)' % search_string, contents)[-4:]
+ result = [ float(r) for r in result ]
+ if not time_string is None:
+ try:
+ result = result[0]
+ except IndexError:
+ sys.stderr.write('file %s has no results!\n' % repr(file))
+ return None
+ return result
+
+ def get_function_profile(self, file, function):
+ """
+ Returns the file, line number, function name, and cumulative time.
+ """
+ try:
+ import pstats
+ except ImportError, e:
+ sys.stderr.write('%s: func: %s\n' % (self.name, e))
+ sys.stderr.write('%s This version of Python is missing the profiler.\n' % self.name_spaces)
+ sys.stderr.write('%s Cannot use the "func" subcommand.\n' % self.name_spaces)
+ sys.exit(1)
+ statistics = pstats.Stats(file).stats
+ matches = [ e for e in statistics.items() if e[0][2] == function ]
+ r = matches[0]
+ return r[0][0], r[0][1], r[0][2], r[1][3]
+
+ def get_function_time(self, file, function):
+ """
+ Returns just the cumulative time for the specified function.
+ """
+ return self.get_function_profile(file, function)[3]
+
+ def get_memory(self, file, memory_string=None):
+ """
+ Returns a list of integers giving the amount of memory used. The
+ default behavior is to return the values for all the stages.
+ """
+ if memory_string is None:
+ search_string = self.memory_string_all
+ else:
+ search_string = memory_string
+ lines = open(file).readlines()
+ lines = [ l for l in lines if l.startswith(search_string) ][-4:]
+ result = [ int(l.split()[-1]) for l in lines[-4:] ]
+ if len(result) == 1:
+ result = result[0]
+ return result
+
+ def get_object_counts(self, file, object_name, index=None):
+ """
+ Returns the counts of the specified object_name.
+ """
+ object_string = ' ' + object_name + '\n'
+ lines = open(file).readlines()
+ line = [ l for l in lines if l.endswith(object_string) ][0]
+ result = [ int(field) for field in line.split()[:4] ]
+ if not index is None:
+ result = result[index]
+ return result
+
+ #
+
+ command_alias = {}
+
+ def execute_subcommand(self, argv):
+ """
+ Executes the do_*() function for the specified subcommand (argv[0]).
+ """
+ if not argv:
+ return
+ cmdName = self.command_alias.get(argv[0], argv[0])
+ try:
+ func = getattr(self, 'do_' + cmdName)
+ except AttributeError:
+ return self.default(argv)
+ try:
+ return func(argv)
+ except TypeError, e:
+ sys.stderr.write("%s %s: %s\n" % (self.name, cmdName, e))
+ import traceback
+ traceback.print_exc(file=sys.stderr)
+ sys.stderr.write("Try '%s help %s'\n" % (self.name, cmdName))
+
+ def default(self, argv):
+ """
+ The default behavior for an unknown subcommand. Prints an
+ error message and exits.
+ """
+ sys.stderr.write('%s: Unknown subcommand "%s".\n' % (self.name, argv[0]))
+ sys.stderr.write('Type "%s help" for usage.\n' % self.name)
+ sys.exit(1)
+
+ #
+
+ def do_help(self, argv):
+ """
+ """
+ if argv[1:]:
+ for arg in argv[1:]:
+ try:
+ func = getattr(self, 'do_' + arg)
+ except AttributeError:
+ sys.stderr.write('%s: No help for "%s"\n' % (self.name, arg))
+ else:
+ try:
+ help = getattr(self, 'help_' + arg)
+ except AttributeError:
+ sys.stdout.write(self.doc_to_help(func))
+ sys.stdout.flush()
+ else:
+ help()
+ else:
+ doc = self.doc_to_help(self.__class__)
+ if doc:
+ sys.stdout.write(doc)
+ sys.stdout.flush()
+ return None
+
+ #
+
+ def help_func(self):
+ help = """\
+ Usage: scons-time func [OPTIONS] FILE [...]
+
+ -C DIR, --chdir=DIR Change to DIR before looking for files
+ -f FILE, --file=FILE Read configuration from specified FILE
+ --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT
+ --func=NAME, --function=NAME Report time for function NAME
+ -h, --help Print this help and exit
+ -p STRING, --prefix=STRING Use STRING as log file/profile prefix
+ -t NUMBER, --tail=NUMBER Only report the last NUMBER files
+ --title=TITLE Specify the output plot TITLE
+ """
+ sys.stdout.write(self.outdent(help))
+ sys.stdout.flush()
+
+ def do_func(self, argv):
+ """
+ """
+ format = 'ascii'
+ function_name = '_main'
+ tail = None
+
+ short_opts = '?C:f:hp:t:'
+
+ long_opts = [
+ 'chdir=',
+ 'file=',
+ 'fmt=',
+ 'format=',
+ 'func=',
+ 'function=',
+ 'help',
+ 'prefix=',
+ 'tail=',
+ 'title=',
+ ]
+
+ opts, args = getopt.getopt(argv[1:], short_opts, long_opts)
+
+ for o, a in opts:
+ if o in ('-C', '--chdir'):
+ self.chdir = a
+ elif o in ('-f', '--file'):
+ self.config_file = a
+ elif o in ('--fmt', '--format'):
+ format = a
+ elif o in ('--func', '--function'):
+ function_name = a
+ elif o in ('-?', '-h', '--help'):
+ self.do_help(['help', 'func'])
+ sys.exit(0)
+ elif o in ('--max',):
+ max_time = int(a)
+ elif o in ('-p', '--prefix'):
+ self.prefix = a
+ elif o in ('-t', '--tail'):
+ tail = int(a)
+ elif o in ('--title',):
+ self.title = a
+
+ if self.config_file:
+ execfile(self.config_file, self.__dict__)
+
+ if self.chdir:
+ os.chdir(self.chdir)
+
+ if not args:
+
+ pattern = '%s*.prof' % self.prefix
+ args = self.args_to_files([pattern], tail)
+
+ if not args:
+ if self.chdir:
+ directory = self.chdir
+ else:
+ directory = os.getcwd()
+
+ sys.stderr.write('%s: func: No arguments specified.\n' % self.name)
+ sys.stderr.write('%s No %s*.prof files found in "%s".\n' % (self.name_spaces, self.prefix, directory))
+ sys.stderr.write('%s Type "%s help func" for help.\n' % (self.name_spaces, self.name))
+ sys.exit(1)
+
+ else:
+
+ args = self.args_to_files(args, tail)
+
+ cwd_ = os.getcwd() + os.sep
+
+ if format == 'ascii':
+
+ def print_function_timing(file, func):
+ try:
+ f, line, func, time = self.get_function_profile(file, func)
+ except ValueError, e:
+ sys.stderr.write("%s: func: %s: %s\n" % (self.name, file, e))
+ else:
+ if f.startswith(cwd_):
+ f = f[len(cwd_):]
+ print "%.3f %s:%d(%s)" % (time, f, line, func)
+
+ for file in args:
+ print_function_timing(file, function_name)
+
+ elif format == 'gnuplot':
+
+ results = self.collect_results(args, self.get_function_time,
+ function_name)
+
+ self.gnuplot_results(results)
+
+ else:
+
+ sys.stderr.write('%s: func: Unknown format "%s".\n' % (self.name, format))
+ sys.exit(1)
+
+ #
+
+ def help_mem(self):
+ help = """\
+ Usage: scons-time mem [OPTIONS] FILE [...]
+
+ -C DIR, --chdir=DIR Change to DIR before looking for files
+ -f FILE, --file=FILE Read configuration from specified FILE
+ --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT
+ -h, --help Print this help and exit
+ -p STRING, --prefix=STRING Use STRING as log file/profile prefix
+ --stage=STAGE Plot memory at the specified stage:
+ pre-read, post-read, pre-build,
+ post-build (default: post-build)
+ -t NUMBER, --tail=NUMBER Only report the last NUMBER files
+ --title=TITLE Specify the output plot TITLE
+ """
+ sys.stdout.write(self.outdent(help))
+ sys.stdout.flush()
+
+ def do_mem(self, argv):
+
+ format = 'ascii'
+ logfile_path = lambda x: x
+ stage = self.default_stage
+ tail = None
+
+ short_opts = '?C:f:hp:t:'
+
+ long_opts = [
+ 'chdir=',
+ 'file=',
+ 'fmt=',
+ 'format=',
+ 'help',
+ 'prefix=',
+ 'stage=',
+ 'tail=',
+ 'title=',
+ ]
+
+ opts, args = getopt.getopt(argv[1:], short_opts, long_opts)
+
+ for o, a in opts:
+ if o in ('-C', '--chdir'):
+ self.chdir = a
+ elif o in ('-f', '--file'):
+ self.config_file = a
+ elif o in ('--fmt', '--format'):
+ format = a
+ elif o in ('-?', '-h', '--help'):
+ self.do_help(['help', 'mem'])
+ sys.exit(0)
+ elif o in ('-p', '--prefix'):
+ self.prefix = a
+ elif o in ('--stage',):
+ if not a in self.stages:
+ sys.stderr.write('%s: mem: Unrecognized stage "%s".\n' % (self.name, a))
+ sys.exit(1)
+ stage = a
+ elif o in ('-t', '--tail'):
+ tail = int(a)
+ elif o in ('--title',):
+ self.title = a
+
+ if self.config_file:
+ execfile(self.config_file, self.__dict__)
+
+ if self.chdir:
+ os.chdir(self.chdir)
+ logfile_path = lambda x, c=self.chdir: os.path.join(c, x)
+
+ if not args:
+
+ pattern = '%s*.log' % self.prefix
+ args = self.args_to_files([pattern], tail)
+
+ if not args:
+ if self.chdir:
+ directory = self.chdir
+ else:
+ directory = os.getcwd()
+
+ sys.stderr.write('%s: mem: No arguments specified.\n' % self.name)
+ sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory))
+ sys.stderr.write('%s Type "%s help mem" for help.\n' % (self.name_spaces, self.name))
+ sys.exit(1)
+
+ else:
+
+ args = self.args_to_files(args, tail)
+
+ cwd_ = os.getcwd() + os.sep
+
+ if format == 'ascii':
+
+ self.ascii_table(args, tuple(self.stages), self.get_memory, logfile_path)
+
+ elif format == 'gnuplot':
+
+ results = self.collect_results(args, self.get_memory,
+ self.stage_strings[stage])
+
+ self.gnuplot_results(results)
+
+ else:
+
+ sys.stderr.write('%s: mem: Unknown format "%s".\n' % (self.name, format))
+ sys.exit(1)
+
+ return 0
+
+ #
+
+ def help_obj(self):
+ help = """\
+ Usage: scons-time obj [OPTIONS] OBJECT FILE [...]
+
+ -C DIR, --chdir=DIR Change to DIR before looking for files
+ -f FILE, --file=FILE Read configuration from specified FILE
+ --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT
+ -h, --help Print this help and exit
+ -p STRING, --prefix=STRING Use STRING as log file/profile prefix
+ --stage=STAGE Plot memory at the specified stage:
+ pre-read, post-read, pre-build,
+ post-build (default: post-build)
+ -t NUMBER, --tail=NUMBER Only report the last NUMBER files
+ --title=TITLE Specify the output plot TITLE
+ """
+ sys.stdout.write(self.outdent(help))
+ sys.stdout.flush()
+
+ def do_obj(self, argv):
+
+ format = 'ascii'
+ logfile_path = lambda x: x
+ stage = self.default_stage
+ tail = None
+
+ short_opts = '?C:f:hp:t:'
+
+ long_opts = [
+ 'chdir=',
+ 'file=',
+ 'fmt=',
+ 'format=',
+ 'help',
+ 'prefix=',
+ 'stage=',
+ 'tail=',
+ 'title=',
+ ]
+
+ opts, args = getopt.getopt(argv[1:], short_opts, long_opts)
+
+ for o, a in opts:
+ if o in ('-C', '--chdir'):
+ self.chdir = a
+ elif o in ('-f', '--file'):
+ self.config_file = a
+ elif o in ('--fmt', '--format'):
+ format = a
+ elif o in ('-?', '-h', '--help'):
+ self.do_help(['help', 'obj'])
+ sys.exit(0)
+ elif o in ('-p', '--prefix'):
+ self.prefix = a
+ elif o in ('--stage',):
+ if not a in self.stages:
+ sys.stderr.write('%s: obj: Unrecognized stage "%s".\n' % (self.name, a))
+ sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name))
+ sys.exit(1)
+ stage = a
+ elif o in ('-t', '--tail'):
+ tail = int(a)
+ elif o in ('--title',):
+ self.title = a
+
+ if not args:
+ sys.stderr.write('%s: obj: Must specify an object name.\n' % self.name)
+ sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name))
+ sys.exit(1)
+
+ object_name = args.pop(0)
+
+ if self.config_file:
+ execfile(self.config_file, self.__dict__)
+
+ if self.chdir:
+ os.chdir(self.chdir)
+ logfile_path = lambda x, c=self.chdir: os.path.join(c, x)
+
+ if not args:
+
+ pattern = '%s*.log' % self.prefix
+ args = self.args_to_files([pattern], tail)
+
+ if not args:
+ if self.chdir:
+ directory = self.chdir
+ else:
+ directory = os.getcwd()
+
+ sys.stderr.write('%s: obj: No arguments specified.\n' % self.name)
+ sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory))
+ sys.stderr.write('%s Type "%s help obj" for help.\n' % (self.name_spaces, self.name))
+ sys.exit(1)
+
+ else:
+
+ args = self.args_to_files(args, tail)
+
+ cwd_ = os.getcwd() + os.sep
+
+ if format == 'ascii':
+
+ self.ascii_table(args, tuple(self.stages), self.get_object_counts, logfile_path, object_name)
+
+ elif format == 'gnuplot':
+
+ stage_index = 0
+ for s in self.stages:
+ if stage == s:
+ break
+ stage_index = stage_index + 1
+
+ results = self.collect_results(args, self.get_object_counts,
+ object_name, stage_index)
+
+ self.gnuplot_results(results)
+
+ else:
+
+ sys.stderr.write('%s: obj: Unknown format "%s".\n' % (self.name, format))
+ sys.exit(1)
+
+ return 0
+
+ #
+
+ def help_run(self):
+ help = """\
+ Usage: scons-time run [OPTIONS] [FILE ...]
+
+ --aegis=PROJECT Use SCons from the Aegis PROJECT
+ --chdir=DIR Name of unpacked directory for chdir
+ -f FILE, --file=FILE Read configuration from specified FILE
+ -h, --help Print this help and exit
+ -n, --no-exec No execute, just print command lines
+ --number=NUMBER Put output in files for run NUMBER
+ --outdir=OUTDIR Put output files in OUTDIR
+ -p STRING, --prefix=STRING Use STRING as log file/profile prefix
+ --python=PYTHON Time using the specified PYTHON
+ -q, --quiet Don't print command lines
+ --scons=SCONS Time using the specified SCONS
+ --svn=URL, --subversion=URL Use SCons from Subversion URL
+ -v, --verbose Display output of commands
+ """
+ sys.stdout.write(self.outdent(help))
+ sys.stdout.flush()
+
+ def do_run(self, argv):
+ """
+ """
+ run_number_list = [None]
+
+ short_opts = '?f:hnp:qs:v'
+
+ long_opts = [
+ 'aegis=',
+ 'file=',
+ 'help',
+ 'no-exec',
+ 'number=',
+ 'outdir=',
+ 'prefix=',
+ 'python=',
+ 'quiet',
+ 'scons=',
+ 'svn=',
+ 'subdir=',
+ 'subversion=',
+ 'verbose',
+ ]
+
+ opts, args = getopt.getopt(argv[1:], short_opts, long_opts)
+
+ for o, a in opts:
+ if o in ('--aegis',):
+ self.aegis_project = a
+ elif o in ('-f', '--file'):
+ self.config_file = a
+ elif o in ('-?', '-h', '--help'):
+ self.do_help(['help', 'run'])
+ sys.exit(0)
+ elif o in ('-n', '--no-exec'):
+ self.execute = self._do_not_execute
+ elif o in ('--number',):
+ run_number_list = self.split_run_numbers(a)
+ elif o in ('--outdir',):
+ self.outdir = a
+ elif o in ('-p', '--prefix'):
+ self.prefix = a
+ elif o in ('--python',):
+ self.python = a
+ elif o in ('-q', '--quiet'):
+ self.display = self._do_not_display
+ elif o in ('-s', '--subdir'):
+ self.subdir = a
+ elif o in ('--scons',):
+ self.scons = a
+ elif o in ('--svn', '--subversion'):
+ self.subversion_url = a
+ elif o in ('-v', '--verbose'):
+ self.redirect = tee_to_file
+ self.verbose = True
+ self.svn_co_flag = ''
+
+ if not args and not self.config_file:
+ sys.stderr.write('%s: run: No arguments or -f config file specified.\n' % self.name)
+ sys.stderr.write('%s Type "%s help run" for help.\n' % (self.name_spaces, self.name))
+ sys.exit(1)
+
+ if self.config_file:
+ execfile(self.config_file, self.__dict__)
+
+ if args:
+ self.archive_list = args
+
+ archive_file_name = os.path.split(self.archive_list[0])[1]
+
+ if not self.subdir:
+ self.subdir = self.archive_splitext(archive_file_name)[0]
+
+ if not self.prefix:
+ self.prefix = self.archive_splitext(archive_file_name)[0]
+
+ prepare = None
+ if self.subversion_url:
+ prepare = self.prep_subversion_run
+ elif self.aegis_project:
+ prepare = self.prep_aegis_run
+
+ for run_number in run_number_list:
+ self.individual_run(run_number, self.archive_list, prepare)
+
+ def split_run_numbers(self, s):
+ result = []
+ for n in s.split(','):
+ try:
+ x, y = n.split('-')
+ except ValueError:
+ result.append(int(n))
+ else:
+ result.extend(range(int(x), int(y)+1))
+ return result
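+ # Illustrative example (not part of the original source):
+ #
+ #   >>> SConsTimer().split_run_numbers('1,4-6')
+ #   [1, 4, 5, 6]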
+
+ def scons_path(self, dir):
+ return os.path.join(dir, 'src', 'script', 'scons.py')
+
+ def scons_lib_dir_path(self, dir):
+ return os.path.join(dir, 'src', 'engine')
+
+ def prep_aegis_run(self, commands, removals):
+ self.aegis_tmpdir = make_temp_file(prefix = self.name + '-aegis-')
+ removals.append((shutil.rmtree, 'rm -rf %%s', self.aegis_tmpdir))
+
+ self.aegis_parent_project = os.path.splitext(self.aegis_project)[0]
+ self.scons = self.scons_path(self.aegis_tmpdir)
+ self.scons_lib_dir = self.scons_lib_dir_path(self.aegis_tmpdir)
+
+ commands.extend([
+ 'mkdir %(aegis_tmpdir)s',
+ (lambda: os.chdir(self.aegis_tmpdir), 'cd %(aegis_tmpdir)s'),
+ '%(aegis)s -cp -ind -p %(aegis_parent_project)s .',
+ '%(aegis)s -cp -ind -p %(aegis_project)s -delta %(run_number)s .',
+ ])
+
+ def prep_subversion_run(self, commands, removals):
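+ """
+ Checks SCons out of the configured Subversion URL into a
+ temporary directory (scheduled for later removal) and points
+ self.scons and self.scons_lib_dir at that checkout.
+ """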
+ self.svn_tmpdir = make_temp_file(prefix = self.name + '-svn-')
+ removals.append((shutil.rmtree, 'rm -rf %%s', self.svn_tmpdir))
+
+ self.scons = self.scons_path(self.svn_tmpdir)
+ self.scons_lib_dir = self.scons_lib_dir_path(self.svn_tmpdir)
+
+ commands.extend([
+ 'mkdir %(svn_tmpdir)s',
+ '%(svn)s co %(svn_co_flag)s -r %(run_number)s %(subversion_url)s %(svn_tmpdir)s',
+ ])
+
+ def individual_run(self, run_number, archive_list, prepare=None):
+ """
+ Performs an individual run of the default SCons invocations.
+ """
+
+ commands = []
+ removals = []
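+ # commands collects the work for this run: plain strings are
+ # %-expanded against self.__dict__ and run as shell commands,
+ # while tuples invoke Python callables directly. removals holds
+ # cleanup steps that run only when the PRESERVE environment
+ # variable is not set.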
+
+ if prepare:
+ prepare(commands, removals)
+
+ save_scons = self.scons
+ save_scons_wrapper = self.scons_wrapper
+ save_scons_lib_dir = self.scons_lib_dir
+
+ if self.outdir is None:
+ self.outdir = self.orig_cwd
+ elif not os.path.isabs(self.outdir):
+ self.outdir = os.path.join(self.orig_cwd, self.outdir)
+
+ if self.scons is None:
+ self.scons = self.scons_path(self.orig_cwd)
+
+ if self.scons_lib_dir is None:
+ self.scons_lib_dir = self.scons_lib_dir_path(self.orig_cwd)
+
+ if self.scons_wrapper is None:
+ self.scons_wrapper = self.scons
+
+ if not run_number:
+ run_number = self.find_next_run_number(self.outdir, self.prefix)
+
+ self.run_number = str(run_number)
+
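+ # e.g. a prefix of 'foo' and run number 12 yield 'foo-012',
+ # giving each run its own log/profile file name prefix.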
+ self.prefix_run = self.prefix + '-%03d' % run_number
+
+ if self.targets0 is None:
+ self.targets0 = self.startup_targets
+ if self.targets1 is None:
+ self.targets1 = self.targets
+ if self.targets2 is None:
+ self.targets2 = self.targets
+
+ self.tmpdir = make_temp_file(prefix = self.name + '-')
+
+ commands.extend([
+ 'mkdir %(tmpdir)s',
+
+ (os.chdir, 'cd %%s', self.tmpdir),
+ ])
+
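+ # Stage each archive into the temporary build directory: copy
+ # directories recursively, unpack recognized archive suffixes via
+ # unpack_map, and fall back to a plain file copy otherwise.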
+ for archive in archive_list:
+ if not os.path.isabs(archive):
+ archive = os.path.join(self.orig_cwd, archive)
+ if os.path.isdir(archive):
+ dest = os.path.split(archive)[1]
+ commands.append((shutil.copytree, 'cp -r %%s %%s', archive, dest))
+ else:
+ suffix = self.archive_splitext(archive)[1]
+ unpack_command = self.unpack_map.get(suffix)
+ if not unpack_command:
+ dest = os.path.split(archive)[1]
+ commands.append((shutil.copyfile, 'cp %%s %%s', archive, dest))
+ else:
+ commands.append(unpack_command + (archive,))
+
+ commands.extend([
+ (os.chdir, 'cd %%s', self.subdir),
+ ])
+
+ commands.extend(self.initial_commands)
+
+ commands.extend([
+ (lambda: read_tree('.'),
+ 'find * -type f | xargs cat > /dev/null'),
+
+ (self.set_env, 'export %%s=%%s',
+ 'SCONS_LIB_DIR', self.scons_lib_dir),
+
+ '%(python)s %(scons_wrapper)s --version',
+ ])
+
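+ # Run each configured SCons invocation: profile_name(index) is
+ # exposed to the command templates as %(profN)s, and output is
+ # logged via logfile_name(index).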
+ index = 0
+ for run_command in self.run_commands:
+ setattr(self, 'prof%d' % index, self.profile_name(index))
+ c = (
+ self.log_execute,
+ self.log_display,
+ run_command,
+ self.logfile_name(index),
+ )
+ commands.append(c)
+ index = index + 1
+
+ commands.extend([
+ (os.chdir, 'cd %%s', self.orig_cwd),
+ ])
+
+ if not os.environ.get('PRESERVE'):
+ commands.extend(removals)
+
+ commands.append((shutil.rmtree, 'rm -rf %%s', self.tmpdir))
+
+ self.run_command_list(commands, self.__dict__)
+
+ self.scons = save_scons
+ self.scons_lib_dir = save_scons_lib_dir
+ self.scons_wrapper = save_scons_wrapper
+
+ #
+
+ def help_time(self):
+ help = """\
+ Usage: scons-time time [OPTIONS] FILE [...]
+
+ -C DIR, --chdir=DIR Change to DIR before looking for files
+ -f FILE, --file=FILE Read configuration from specified FILE
+ --fmt=FORMAT, --format=FORMAT Print data in specified FORMAT
+ -h, --help Print this help and exit
+ -p STRING, --prefix=STRING Use STRING as log file/profile prefix
+ -t NUMBER, --tail=NUMBER Only report the last NUMBER files
+ --title=TITLE Specify the output plot TITLE
+ --which=TIMER Plot timings for TIMER: total,
+ SConscripts, SCons, commands.
+ """
+ sys.stdout.write(self.outdent(help))
+ sys.stdout.flush()
+
+ def do_time(self, argv):
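+ """
+ Reports the timing information recorded in one or more
+ scons-time log files, either as an ASCII table or as input
+ for gnuplot.
+ """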
+
+ format = 'ascii'
+ logfile_path = lambda x: x
+ tail = None
+ which = 'total'
+
+ short_opts = '?C:f:hp:t:'
+
+ long_opts = [
+ 'chdir=',
+ 'file=',
+ 'fmt=',
+ 'format=',
+ 'help',
+ 'prefix=',
+ 'tail=',
+ 'title=',
+ 'which=',
+ ]
+
+ opts, args = getopt.getopt(argv[1:], short_opts, long_opts)
+
+ for o, a in opts:
+ if o in ('-C', '--chdir'):
+ self.chdir = a
+ elif o in ('-f', '--file'):
+ self.config_file = a
+ elif o in ('--fmt', '--format'):
+ format = a
+ elif o in ('-?', '-h', '--help'):
+ self.do_help(['help', 'time'])
+ sys.exit(0)
+ elif o in ('-p', '--prefix'):
+ self.prefix = a
+ elif o in ('-t', '--tail'):
+ tail = int(a)
+ elif o in ('--title',):
+ self.title = a
+ elif o in ('--which',):
+ if a not in self.time_strings:
+ sys.stderr.write('%s: time: Unrecognized timer "%s".\n' % (self.name, a))
+ sys.stderr.write('%s Type "%s help time" for help.\n' % (self.name_spaces, self.name))
+ sys.exit(1)
+ which = a
+
+ if self.config_file:
+ execfile(self.config_file, self.__dict__)
+
+ if self.chdir:
+ os.chdir(self.chdir)
+ logfile_path = lambda x, c=self.chdir: os.path.join(c, x)
+
+ if not args:
+
+ pattern = '%s*.log' % self.prefix
+ args = self.args_to_files([pattern], tail)
+
+ if not args:
+ if self.chdir:
+ directory = self.chdir
+ else:
+ directory = os.getcwd()
+
+ sys.stderr.write('%s: time: No arguments specified.\n' % self.name)
+ sys.stderr.write('%s No %s*.log files found in "%s".\n' % (self.name_spaces, self.prefix, directory))
+ sys.stderr.write('%s Type "%s help time" for help.\n' % (self.name_spaces, self.name))
+ sys.exit(1)
+
+ else:
+
+ args = self.args_to_files(args, tail)
+
+ cwd_ = os.getcwd() + os.sep
+
+ if format == 'ascii':
+
+ columns = ("Total", "SConscripts", "SCons", "commands")
+ self.ascii_table(args, columns, self.get_debug_times, logfile_path)
+
+ elif format == 'gnuplot':
+
+ results = self.collect_results(args, self.get_debug_times,
+ self.time_strings[which])
+
+ self.gnuplot_results(results, fmt='%s %.6f')
+
+ else:
+
+ sys.stderr.write('%s: time: Unknown format "%s".\n' % (self.name, format))
+ sys.exit(1)
+
+if __name__ == '__main__':
+ opts, args = getopt.getopt(sys.argv[1:], 'h?V', ['help', 'version'])
+
+ ST = SConsTimer()
+
+ for o, a in opts:
+ if o in ('-?', '-h', '--help'):
+ ST.do_help(['help'])
+ sys.exit(0)
+ elif o in ('-V', '--version'):
+ sys.stdout.write('scons-time version\n')
+ sys.exit(0)
+
+ if not args:
+ sys.stderr.write('Type "%s help" for usage.\n' % ST.name)
+ sys.exit(1)
+
+ ST.execute_subcommand(args)
diff --git a/tools/scons/scons.py b/tools/scons/scons.py
new file mode 100755
index 000000000..88276a47e
--- /dev/null
+++ b/tools/scons/scons.py
@@ -0,0 +1,165 @@
+#! /usr/bin/env python
+#
+# SCons - a Software Constructor
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/script/scons.py 3842 2008/12/20 22:59:52 scons"
+
+__version__ = "1.2.0"
+
+__build__ = "r3842"
+
+__buildsys__ = "scons-dev"
+
+__date__ = "2008/12/20 22:59:52"
+
+__developer__ = "scons"
+
+import os
+import os.path
+import sys
+
+##############################################################################
+# BEGIN STANDARD SCons SCRIPT HEADER
+#
+# This is the cut-and-paste logic so that a self-contained script can
+# interoperate correctly with different SCons versions and installation
+# locations for the engine. If you modify anything in this section, you
+# should also change other scripts that use this same header.
+##############################################################################
+
+# Strip the script directory from sys.path so that on case-insensitive
+# (WIN32) systems Python doesn't think that the "scons" script is the
+# "SCons" package. Replace it with our own library directories
+# (version-specific first, in case one was installed by hand there,
+# followed by generic) so we pick up the right version of the build
+# engine modules if they're in either directory.
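+#
+# The effective search order is therefore: $SCONS_LIB_DIR (if set),
+# then scons-local-1.2.0/ and scons-local/ next to this script, then
+# scons-1.2.0/ and scons/ under the library prefixes computed below.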
+
+script_dir = sys.path[0]
+
+if script_dir in sys.path:
+ sys.path.remove(script_dir)
+
+libs = []
+
+if os.environ.has_key("SCONS_LIB_DIR"):
+ libs.append(os.environ["SCONS_LIB_DIR"])
+
+local_version = 'scons-local-' + __version__
+local = 'scons-local'
+if script_dir:
+ local_version = os.path.join(script_dir, local_version)
+ local = os.path.join(script_dir, local)
+libs.append(os.path.abspath(local_version))
+libs.append(os.path.abspath(local))
+
+scons_version = 'scons-%s' % __version__
+
+prefs = []
+
+if sys.platform == 'win32':
+ # sys.prefix is (likely) C:\Python*;
+ # check only C:\Python*.
+ prefs.append(sys.prefix)
+ prefs.append(os.path.join(sys.prefix, 'Lib', 'site-packages'))
+else:
+ # On other (POSIX) platforms, things are more complicated due to
+ # the variety of path names and library locations. Try to be smart
+ # about it.
+ if script_dir == 'bin':
+ # script_dir is `pwd`/bin;
+ # check `pwd`/lib/scons*.
+ prefs.append(os.getcwd())
+ else:
+ if script_dir == '.' or script_dir == '':
+ script_dir = os.getcwd()
+ head, tail = os.path.split(script_dir)
+ if tail == "bin":
+ # script_dir is /foo/bin;
+ # check /foo/lib/scons*.
+ prefs.append(head)
+
+ head, tail = os.path.split(sys.prefix)
+ if tail == "usr":
+ # sys.prefix is /foo/usr;
+ # check /foo/usr/lib/scons* first,
+ # then /foo/usr/local/lib/scons*.
+ prefs.append(sys.prefix)
+ prefs.append(os.path.join(sys.prefix, "local"))
+ elif tail == "local":
+ h, t = os.path.split(head)
+ if t == "usr":
+ # sys.prefix is /foo/usr/local;
+ # check /foo/usr/local/lib/scons* first,
+ # then /foo/usr/lib/scons*.
+ prefs.append(sys.prefix)
+ prefs.append(head)
+ else:
+ # sys.prefix is /foo/local;
+ # check only /foo/local/lib/scons*.
+ prefs.append(sys.prefix)
+ else:
+ # sys.prefix is /foo (ends in neither /usr nor /local);
+ # check only /foo/lib/scons*.
+ prefs.append(sys.prefix)
+
+ temp = map(lambda x: os.path.join(x, 'lib'), prefs)
+ temp.extend(map(lambda x: os.path.join(x,
+ 'lib',
+ 'python' + sys.version[:3],
+ 'site-packages'),
+ prefs))
+ prefs = temp
+
+ # Add the parent directory of the current python's library to the
+ # preferences. On SuSE-91/AMD64, for example, this is /usr/lib64,
+ # not /usr/lib.
+ try:
+ libpath = os.__file__
+ except AttributeError:
+ pass
+ else:
+ # Split /usr/libfoo/python*/os.py to /usr/libfoo/python*.
+ libpath, tail = os.path.split(libpath)
+ # Split /usr/libfoo/python* to /usr/libfoo
+ libpath, tail = os.path.split(libpath)
+ # Check /usr/libfoo/scons*.
+ prefs.append(libpath)
+
+# Look first for 'scons-__version__' in all of our preference libs,
+# then for 'scons'.
+libs.extend(map(lambda x: os.path.join(x, scons_version), prefs))
+libs.extend(map(lambda x: os.path.join(x, 'scons'), prefs))
+
+sys.path = libs + sys.path
+
+##############################################################################
+# END STANDARD SCons SCRIPT HEADER
+##############################################################################
+
+if __name__ == "__main__":
+ import SCons.Script
+ # this does all the work, and calls sys.exit
+ # with the proper exit status when done.
+ SCons.Script.main()
diff --git a/tools/scons/sconsign.py b/tools/scons/sconsign.py
new file mode 100755
index 000000000..a33b7b422
--- /dev/null
+++ b/tools/scons/sconsign.py
@@ -0,0 +1,502 @@
+#! /usr/bin/env python
+#
+# SCons - a Software Constructor
+#
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#
+
+__revision__ = "src/script/sconsign.py 3842 2008/12/20 22:59:52 scons"
+
+__version__ = "1.2.0"
+
+__build__ = "r3842"
+
+__buildsys__ = "scons-dev"
+
+__date__ = "2008/12/20 22:59:52"
+
+__developer__ = "scons"
+
+import os
+import os.path
+import sys
+import time
+
+##############################################################################
+# BEGIN STANDARD SCons SCRIPT HEADER
+#
+# This is the cut-and-paste logic so that a self-contained script can
+# interoperate correctly with different SCons versions and installation
+# locations for the engine. If you modify anything in this section, you
+# should also change other scripts that use this same header.
+##############################################################################
+
+# Strip the script directory from sys.path so that on case-insensitive
+# (WIN32) systems Python doesn't think that the "scons" script is the
+# "SCons" package. Replace it with our own library directories
+# (version-specific first, in case one was installed by hand there,
+# followed by generic) so we pick up the right version of the build
+# engine modules if they're in either directory.
+
+script_dir = sys.path[0]
+
+if script_dir in sys.path:
+ sys.path.remove(script_dir)
+
+libs = []
+
+if os.environ.has_key("SCONS_LIB_DIR"):
+ libs.append(os.environ["SCONS_LIB_DIR"])
+
+local_version = 'scons-local-' + __version__
+local = 'scons-local'
+if script_dir:
+ local_version = os.path.join(script_dir, local_version)
+ local = os.path.join(script_dir, local)
+libs.append(os.path.abspath(local_version))
+libs.append(os.path.abspath(local))
+
+scons_version = 'scons-%s' % __version__
+
+prefs = []
+
+if sys.platform == 'win32':
+ # sys.prefix is (likely) C:\Python*;
+ # check only C:\Python*.
+ prefs.append(sys.prefix)
+ prefs.append(os.path.join(sys.prefix, 'Lib', 'site-packages'))
+else:
+ # On other (POSIX) platforms, things are more complicated due to
+ # the variety of path names and library locations. Try to be smart
+ # about it.
+ if script_dir == 'bin':
+ # script_dir is `pwd`/bin;
+ # check `pwd`/lib/scons*.
+ prefs.append(os.getcwd())
+ else:
+ if script_dir == '.' or script_dir == '':
+ script_dir = os.getcwd()
+ head, tail = os.path.split(script_dir)
+ if tail == "bin":
+ # script_dir is /foo/bin;
+ # check /foo/lib/scons*.
+ prefs.append(head)
+
+ head, tail = os.path.split(sys.prefix)
+ if tail == "usr":
+ # sys.prefix is /foo/usr;
+ # check /foo/usr/lib/scons* first,
+ # then /foo/usr/local/lib/scons*.
+ prefs.append(sys.prefix)
+ prefs.append(os.path.join(sys.prefix, "local"))
+ elif tail == "local":
+ h, t = os.path.split(head)
+ if t == "usr":
+ # sys.prefix is /foo/usr/local;
+ # check /foo/usr/local/lib/scons* first,
+ # then /foo/usr/lib/scons*.
+ prefs.append(sys.prefix)
+ prefs.append(head)
+ else:
+ # sys.prefix is /foo/local;
+ # check only /foo/local/lib/scons*.
+ prefs.append(sys.prefix)
+ else:
+ # sys.prefix is /foo (ends in neither /usr nor /local);
+ # check only /foo/lib/scons*.
+ prefs.append(sys.prefix)
+
+ temp = map(lambda x: os.path.join(x, 'lib'), prefs)
+ temp.extend(map(lambda x: os.path.join(x,
+ 'lib',
+ 'python' + sys.version[:3],
+ 'site-packages'),
+ prefs))
+ prefs = temp
+
+ # Add the parent directory of the current python's library to the
+ # preferences. On SuSE-91/AMD64, for example, this is /usr/lib64,
+ # not /usr/lib.
+ try:
+ libpath = os.__file__
+ except AttributeError:
+ pass
+ else:
+ # Split /usr/libfoo/python*/os.py to /usr/libfoo/python*.
+ libpath, tail = os.path.split(libpath)
+ # Split /usr/libfoo/python* to /usr/libfoo
+ libpath, tail = os.path.split(libpath)
+ # Check /usr/libfoo/scons*.
+ prefs.append(libpath)
+
+# Look first for 'scons-__version__' in all of our preference libs,
+# then for 'scons'.
+libs.extend(map(lambda x: os.path.join(x, scons_version), prefs))
+libs.extend(map(lambda x: os.path.join(x, 'scons'), prefs))
+
+sys.path = libs + sys.path
+
+##############################################################################
+# END STANDARD SCons SCRIPT HEADER
+##############################################################################
+
+import cPickle
+import imp
+import string
+import whichdb
+
+import SCons.SConsign
+
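+# The standard whichdb module doesn't recognize SCons' .dblite format,
+# so wrap it: report "SCons.dblite" when a .dblite file is found (with
+# or without the suffix spelled out) and fall back to the original
+# whichdb otherwise.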
+def my_whichdb(filename):
+ if filename[-7:] == ".dblite":
+ return "SCons.dblite"
+ try:
+ f = open(filename + ".dblite", "rb")
+ f.close()
+ return "SCons.dblite"
+ except IOError:
+ pass
+ return _orig_whichdb(filename)
+
+_orig_whichdb = whichdb.whichdb
+whichdb.whichdb = my_whichdb
+
+def my_import(mname):
+ if '.' in mname:
+ i = string.rfind(mname, '.')
+ parent = my_import(mname[:i])
+ fp, pathname, description = imp.find_module(mname[i+1:],
+ parent.__path__)
+ else:
+ fp, pathname, description = imp.find_module(mname)
+ return imp.load_module(mname, fp, pathname, description)
+
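+# Flagger behaves like a dict whose entries default to 1 until any
+# flag is explicitly set, after which unrequested entries default to
+# 0: print every field unless specific fields were requested.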
+class Flagger:
+ default_value = 1
+ def __setitem__(self, item, value):
+ self.__dict__[item] = value
+ self.default_value = 0
+ def __getitem__(self, item):
+ return self.__dict__.get(item, self.default_value)
+
+Do_Call = None
+Print_Directories = []
+Print_Entries = []
+Print_Flags = Flagger()
+Verbose = 0
+Readable = 0
+
+def default_mapper(entry, name):
+ try:
+ val = eval("entry."+name)
+ except:
+ val = None
+ return str(val)
+
+def map_action(entry, name):
+ try:
+ bact = entry.bact
+ bactsig = entry.bactsig
+ except AttributeError:
+ return None
+ return '%s [%s]' % (bactsig, bact)
+
+def map_timestamp(entry, name):
+ try:
+ timestamp = entry.timestamp
+ except AttributeError:
+ timestamp = None
+ if Readable and timestamp:
+ return "'" + time.ctime(timestamp) + "'"
+ else:
+ return str(timestamp)
+
+def map_bkids(entry, name):
+ try:
+ bkids = entry.bsources + entry.bdepends + entry.bimplicit
+ bkidsigs = entry.bsourcesigs + entry.bdependsigs + entry.bimplicitsigs
+ except AttributeError:
+ return None
+ result = []
+ for i in xrange(len(bkids)):
+ result.append(nodeinfo_string(bkids[i], bkidsigs[i], " "))
+ if result == []:
+ return None
+ return string.join(result, "\n ")
+
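+# map_field maps field names to special-case formatters; anything not
+# listed falls back to default_mapper. map_name lets the command-line
+# name 'implicit' refer to the stored 'bkids' field.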
+map_field = {
+ 'action' : map_action,
+ 'timestamp' : map_timestamp,
+ 'bkids' : map_bkids,
+}
+
+map_name = {
+ 'implicit' : 'bkids',
+}
+
+def field(name, entry, verbose=Verbose):
+ if not Print_Flags[name]:
+ return None
+ fieldname = map_name.get(name, name)
+ mapper = map_field.get(fieldname, default_mapper)
+ val = mapper(entry, name)
+ if verbose:
+ val = name + ": " + val
+ return val
+
+def nodeinfo_raw(name, ninfo, prefix=""):
+ # This just formats the dictionary, which we would normally use str()
+ # to do, except that we want the keys sorted for deterministic output.
+ d = ninfo.__dict__
+ try:
+ keys = ninfo.field_list + ['_version_id']
+ except AttributeError:
+ keys = d.keys()
+ keys.sort()
+ l = []
+ for k in keys:
+ l.append('%s: %s' % (repr(k), repr(d.get(k))))
+ if '\n' in name:
+ name = repr(name)
+ return name + ': {' + string.join(l, ', ') + '}'
+
+def nodeinfo_cooked(name, ninfo, prefix=""):
+ try:
+ field_list = ninfo.field_list
+ except AttributeError:
+ field_list = []
+ f = lambda x, ni=ninfo, v=Verbose: field(x, ni, v)
+ if '\n' in name:
+ name = repr(name)
+ outlist = [name+':'] + filter(None, map(f, field_list))
+ if Verbose:
+ sep = '\n ' + prefix
+ else:
+ sep = ' '
+ return string.join(outlist, sep)
+
+nodeinfo_string = nodeinfo_cooked
+
+def printfield(name, entry, prefix=""):
+ outlist = field("implicit", entry, 0)
+ if outlist:
+ if Verbose:
+ print " implicit:"
+ print " " + outlist
+ outact = field("action", entry, 0)
+ if outact:
+ if Verbose:
+ print " action: " + outact
+ else:
+ print " " + outact
+
+def printentries(entries, location):
+ if Print_Entries:
+ for name in Print_Entries:
+ try:
+ entry = entries[name]
+ except KeyError:
+ sys.stderr.write("sconsign: no entry `%s' in `%s'\n" % (name, location))
+ else:
+ try:
+ ninfo = entry.ninfo
+ except AttributeError:
+ print name + ":"
+ else:
+ print nodeinfo_string(name, entry.ninfo)
+ printfield(name, entry.binfo)
+ else:
+ names = entries.keys()
+ names.sort()
+ for name in names:
+ entry = entries[name]
+ try:
+ ninfo = entry.ninfo
+ except AttributeError:
+ print name + ":"
+ else:
+ print nodeinfo_string(name, entry.ninfo)
+ printfield(name, entry.binfo)
+
+class Do_SConsignDB:
+ def __init__(self, dbm_name, dbm):
+ self.dbm_name = dbm_name
+ self.dbm = dbm
+
+ def __call__(self, fname):
+ # The *dbm modules stick their own file suffixes on the names
+ # that are passed in. This causes us to jump through some hoops
+ # here so that the user can name the file either with or without
+ # the suffix.
+ try:
+ # Try opening the specified file name. Example:
+ # SPECIFIED OPENED BY self.dbm.open()
+ # --------- -------------------------
+ # .sconsign => .sconsign.dblite
+ # .sconsign.dblite => .sconsign.dblite.dblite
+ db = self.dbm.open(fname, "r")
+ except (IOError, OSError), e:
+ print_e = e
+ try:
+ # That didn't work, so try opening the base name,
+ # so that if the actually passed in 'sconsign.dblite'
+ # (for example), the dbm module will put the suffix back
+ # on for us and open it anyway.
+ db = self.dbm.open(os.path.splitext(fname)[0], "r")
+ except (IOError, OSError):
+ # That didn't work either. See if the file name
+ # they specified just exists (independent of the dbm
+ # suffix-mangling).
+ try:
+ open(fname, "r")
+ except (IOError, OSError), e:
+ # Nope, that file doesn't even exist, so report that
+ # fact back.
+ print_e = e
+ sys.stderr.write("sconsign: %s\n" % (print_e))
+ return
+ except KeyboardInterrupt:
+ raise
+ except cPickle.UnpicklingError:
+ sys.stderr.write("sconsign: ignoring invalid `%s' file `%s'\n" % (self.dbm_name, fname))
+ return
+ except Exception, e:
+ sys.stderr.write("sconsign: ignoring invalid `%s' file `%s': %s\n" % (self.dbm_name, fname, e))
+ return
+
+ if Print_Directories:
+ for dir in Print_Directories:
+ try:
+ val = db[dir]
+ except KeyError:
+ sys.stderr.write("sconsign: no dir `%s' in `%s'\n" % (dir, args[0]))
+ else:
+ self.printentries(dir, val)
+ else:
+ keys = db.keys()
+ keys.sort()
+ for dir in keys:
+ self.printentries(dir, db[dir])
+
+ def printentries(self, dir, val):
+ print '=== ' + dir + ':'
+ printentries(cPickle.loads(val), dir)
+
+def Do_SConsignDir(name):
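+ """
+ Prints the entries of a per-directory .sconsign file, read via
+ SCons.SConsign.Dir.
+ """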
+ try:
+ fp = open(name, 'rb')
+ except (IOError, OSError), e:
+ sys.stderr.write("sconsign: %s\n" % (e))
+ return
+ try:
+ sconsign = SCons.SConsign.Dir(fp)
+ except KeyboardInterrupt:
+ raise
+ except cPickle.UnpicklingError:
+ sys.stderr.write("sconsign: ignoring invalid .sconsign file `%s'\n" % (name))
+ return
+ except Exception, e:
+ sys.stderr.write("sconsign: ignoring invalid .sconsign file `%s': %s\n" % (name, e))
+ return
+ printentries(sconsign.entries, args[0])
+
+##############################################################################
+
+import getopt
+
+helpstr = """\
+Usage: sconsign [OPTIONS] FILE [...]
+Options:
+ -a, --act, --action Print build action information.
+ -c, --csig Print content signature information.
+ -d DIR, --dir=DIR Print only info about DIR.
+ -e ENTRY, --entry=ENTRY Print only info about ENTRY.
+ -f FORMAT, --format=FORMAT FILE is in the specified FORMAT.
+ -h, --help Print this message and exit.
+ -i, --implicit Print implicit dependency information.
+ -r, --readable Print timestamps in human-readable form.
+ --raw Print raw Python object representations.
+ -s, --size Print file sizes.
+ -t, --timestamp Print timestamp information.
+ -v, --verbose Verbose, describe each field.
+"""
+
+opts, args = getopt.getopt(sys.argv[1:], "acd:e:f:hirstv",
+ ['act', 'action',
+ 'csig', 'dir=', 'entry=',
+ 'format=', 'help', 'implicit',
+ 'raw', 'readable',
+ 'size', 'timestamp', 'verbose'])
+
+
+for o, a in opts:
+ if o in ('-a', '--act', '--action'):
+ Print_Flags['action'] = 1
+ elif o in ('-c', '--csig'):
+ Print_Flags['csig'] = 1
+ elif o in ('-d', '--dir'):
+ Print_Directories.append(a)
+ elif o in ('-e', '--entry'):
+ Print_Entries.append(a)
+ elif o in ('-f', '--format'):
+ Module_Map = {'dblite' : 'SCons.dblite',
+ 'sconsign' : None}
+ dbm_name = Module_Map.get(a, a)
+ if dbm_name:
+ try:
+ dbm = my_import(dbm_name)
+ except:
+ sys.stderr.write("sconsign: illegal file format `%s'\n" % a)
+ print helpstr
+ sys.exit(2)
+ Do_Call = Do_SConsignDB(a, dbm)
+ else:
+ Do_Call = Do_SConsignDir
+ elif o in ('-h', '--help'):
+ print helpstr
+ sys.exit(0)
+ elif o in ('-i', '--implicit'):
+ Print_Flags['implicit'] = 1
+ elif o in ('--raw',):
+ nodeinfo_string = nodeinfo_raw
+ elif o in ('-r', '--readable'):
+ Readable = 1
+ elif o in ('-s', '--size'):
+ Print_Flags['size'] = 1
+ elif o in ('-t', '--timestamp'):
+ Print_Flags['timestamp'] = 1
+ elif o in ('-v', '--verbose'):
+ Verbose = 1
+
+if Do_Call:
+ for a in args:
+ Do_Call(a)
+else:
+ for a in args:
+ dbm_name = whichdb.whichdb(a)
+ if dbm_name:
+ Map_Module = {'SCons.dblite' : 'dblite'}
+ dbm = my_import(dbm_name)
+ Do_SConsignDB(Map_Module.get(dbm_name, dbm_name), dbm)(a)
+ else:
+ Do_SConsignDir(a)
+
+sys.exit(0)