diff --git a/.travis.yml b/.travis.yml
index e97c65567..5cc0ca33c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,4 +1,15 @@
-language: c
-compiler:
- - gcc
-script: ./bootstrap.sh
+sudo: false
+os:
+ - linux
+env:
+ - TOOLSET=gcc TEST_ALL_EXTRAS=
+# - TOOLSET=gcc TEST_ALL_EXTRAS=--extras
+ - TOOLSET=clang TEST_ALL_EXTRAS=
+# - TOOLSET=clang TEST_ALL_EXTRAS=--extras
+language: python
+python:
+ - 2.7
+ - 2.6
+script:
+ - ./bootstrap.sh --with-toolset=${TOOLSET}
+ - cd test && python test_all.py ${TOOLSET} ${TEST_ALL_EXTRAS}
diff --git a/Jamroot.jam b/Jamroot.jam
index 54e9cc839..2f50eed09 100644
--- a/Jamroot.jam
+++ b/Jamroot.jam
@@ -6,12 +6,11 @@ import package ;
import os ;
local ext = "" ;
-if [ os.on-windows ]
+if [ os.on-windows ] || [ os.on-vms ]
{
ext = ".exe" ;
}
-
package.install boost-build-engine boost-build
: # properties
: # binaries
diff --git a/bootstrap.sh b/bootstrap.sh
index 8523c3db3..c99242ee8 100755
--- a/bootstrap.sh
+++ b/bootstrap.sh
@@ -57,7 +57,7 @@ my_dir="."
if test "x$TOOLSET" = x; then
guessed_toolset=`$my_dir/src/engine/build.sh --guess-toolset`
case $guessed_toolset in
- acc | darwin | gcc | como | mipspro | pathscale | pgi | qcc | vacpp )
+ acc | darwin | gcc | como | mipspro | pathscale | pgi | qcc | vacpp | xlcpp )
TOOLSET=$guessed_toolset
;;
diff --git a/bootstrap_vms.com b/bootstrap_vms.com
new file mode 100644
index 000000000..3d8afaab9
--- /dev/null
+++ b/bootstrap_vms.com
@@ -0,0 +1,48 @@
+$! Copyright 2015 Artur Shepilko.
+$!
+$! Distributed under the Boost Software License, Version 1.0.
+$! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+$!
+$ THIS_FACILITY = "BOOSTBUILD"
+$
+$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'")
+$ save_verify = f$verify(verify)
+$ save_default = f$env("DEFAULT")
+$
+$ SAY := WRITE SYS$OUTPUT
+$
+$ ON WARNING THEN CONTINUE
+$ ON ERROR THEN GOTO ERROR
+$
+$ SAY "I|Bootstrapping the build engine..."
+$
+$ set def [.src.engine]
+$ @build_vms /out=[--]bootstrap.log
+$
+$ set def 'save_default'
+$
+$ if f$search("[.src.engine.bin_vms]b2.exe") .eqs. "" then goto ERROR
+$ copy [.src.engine.bin_vms]b2.exe []
+$ copy [.src.engine.bin_vms]bjam.exe []
+$
+$ SAY "I|Bootstrapping is done, B2.EXE created."
+$ type sys$input
+$DECK
+
+ To build and install under ROOT: directory, run:
+ MC []B2 --prefix="/root" install
+
+ Set B2 command:
+ B2 :== $ROOT:[BIN]B2.EXE
+
+$EOD
+$ sts = 1
+$
+$EXIT:
+$ set def 'save_default'
+$ exit 'sts' + (0 * f$verify(save_verify))
+
+$ERROR:
+$ SAY "E|Failed to bootstrap build engine, see BOOTSTRAP.LOG for details."
+$ sts = 4
+$ goto EXIT
diff --git a/doc/bjam.qbk b/doc/bjam.qbk
index a57a44021..6b754d13d 100644
--- a/doc/bjam.qbk
+++ b/doc/bjam.qbk
@@ -71,7 +71,7 @@ cd /jam source location/
sh ./build.sh
]
-For the Boost.Jam source included with the Boost distribution the /jam source location/ is =BOOST_ROOT/tools/build/v2/engine=.
+For the Boost.Jam source included with the Boost distribution the /jam source location/ is =BOOST_ROOT/tools/build/src/engine=.
If the scripts fail to detect an appropriate toolset to build with your particular toolset may not be auto-detectable. In that case, you can specify the toolset as the first argument, this assumes that the toolset is readily available in the =PATH=.
@@ -421,7 +421,7 @@ This facility is useful for correct header file scanning, since many compilers w
The basic =b2= language entity is called a rule. A rule is defined in two parts: the procedure and the actions. The procedure is a body of jam statements to be run when the rule is invoked; the actions are the OS shell commands to execute when updating the built targets of the rule.
-Rules can return values, which can be expanded into a list with "[ /rule/ /args/ ... ]". A rule's value is the value of its last statement, though only the following statements have values: 'if' (value of the leg chosen), 'switch' (value of the case chosen), set (value of the resulting variable), and 'return' (value of its arguments). Note that 'return' doesn't actually cause a return, i.e., is a no-op unless it is the last statement of the last block executed within rule body.
+Rules can return values, which can be expanded into a list with "[ /rule/ /args/ ... ]". A rule's value is the value of its last statement, though only the following statements have values: 'if' (value of the leg chosen), 'switch' (value of the case chosen), set (value of the resulting variable), and 'return' (value of its arguments).
The =b2= statements for defining and invoking rules are as follows:
@@ -727,7 +727,23 @@ rule GLOB ( /directories/ * : /patterns/ * : /downcase-opt/ ? )
Using the same wildcards as for the patterns in the switch statement. It is invoked by being used as an argument to a rule invocation inside of "=[ ]=". For example: "[^FILES = \[ GLOB dir1 dir2 : *.c *.h \]]" sets =FILES= to the list of C source and header files in =dir1= and =dir2=. The resulting filenames are the full pathnames, including the directory, but the pattern is applied only to the file name without the directory.
-If /downcase-opt/ is supplied, filenames are converted to all-lowercase before matching against the pattern; you can use this to do case-insensitive matching using lowercase patterns. The paths returned will still have mixed case if the OS supplies them. On Windows NT and Cygwin, filenames are always downcased before matching.
+If /downcase-opt/ is supplied, filenames are converted to all-lowercase before matching against the pattern; you can use this to do case-insensitive matching using lowercase patterns. The paths returned will still have mixed case if the OS supplies them. On Windows NT, Cygwin, and OpenVMS, filenames are always downcased before matching.
+
+[endsect]
+
+[section =GLOB_ARCHIVE= ]
+
+The =GLOB_ARCHIVE= rule does name globbing of object archive members.
+
+[pre
+rule GLOB_ARCHIVE ( /archives/ * : /member-patterns/ * : /downcase-opt/ ? : /symbol-patterns/ ? )
+]
+
+Similarly to =GLOB=, this rule is used to match names of member files in an archive (static object library). A list of successfully matched members is returned, or null otherwise. The resulting member names are qualified with the pathname of the containing archive in the form =archive-path(member-name)=. Member patterns are for matching the member name only; when no wildcards are specified -- an exact match is assumed. Member names generally correspond to object file names and as such are platform-specific -- use of the platform-defined object suffix in the matching patterns can allow for portability.
+
+If /downcase-opt/ is supplied, the member names are converted to all-lowercase before matching against the pattern; you can use this to do case-insensitive matching using lowercase patterns. The paths returned will still have mixed case if the OS supplies them. On Windows NT, Cygwin, and OpenVMS, filenames are always downcased before matching.
+
+Additionally, members can be matched with symbol/function patterns on supported platforms (currently, OpenVMS only). In this case, members containing the matching symbols are returned. Member and symbol patterns are applied as OR conditions, with member patterns taking precedence. On unsupported platforms, null is returned when any symbol patterns are specified.
[endsect]
@@ -841,6 +857,7 @@ rule SHELL ( /command/ : * )
[variablelist
[[=exit-status=] [In addition to the output the result status of the executed command is returned as a second element of the result.]]
[[=no-output=] [Don't capture the output of the command. Instead an empty ("") string value is returned in place of the output.]]
+ [[=strip-eol=] [Remove trailing end-of-line character from output, if any.]]
]
Because the Perforce/Jambase defines a =SHELL= rule which hides the
@@ -1000,7 +1017,7 @@ Creates new /vars/ inside to the enclosing ={}= block, obscuring any previous va
return /values/ ;
]
-Within a rule body, the return statement sets the return value for an invocation of the rule. It does *not* cause the rule to return; a rule's value is actually the value of the last statement executed, so a return should be the last statement executed before the rule "naturally" returns.
+Within a rule body, the return statement sets the return value for an invocation of the rule and returns to the caller.
[pre
switch /value/
@@ -1038,6 +1055,18 @@ while /cond/ { /statements/ }
Repeatedly execute /statements/ while /cond/ remains true upon entry. (See the description of /cond/ expression syntax under if, above).
+[pre
+break ;
+]
+
+Immediately exits the nearest enclosing while or for loop.
+
+[pre
+continue ;
+]
+
+Jumps to the top of the nearest enclosing while or for loop.
+
[endsect]
[section Variables]
@@ -1149,12 +1178,19 @@ prints [^"C:/Program Files/Borland"]
it can be important to pass them true windows-style paths. The =:W=
modifier, *under Cygwin only*, turns a cygwin path into a Win32 path using
the [@http://www.cygwin.com/cygwin-api/func-cygwin-conv-to-win32-path.html
- =cygwin_conv_to_win32_path=] function. On other platforms, the string is
- unchanged. For example
+ =cygwin_conv_to_win32_path=] function. For example
``
x = "/cygdrive/c/Program Files/Borland" ; ECHO $(x:W) ;
``
prints [^"C:\\Program Files\\Borland"] on Cygwin
+
+Similarly, when used on OpenVMS, the =:W= modifier translates a POSIX-style path into native VMS-style format using =decc$to_vms= CRTL function. This modifier is generally used inside action blocks to properly specify file paths in VMS-specific commands. For example
+``
+ x = "subdir/filename.c" ; ECHO $(x:W) ;
+``
+prints [^"\[.subdir\]filename.c"] on OpenVMS
+
+On other platforms, the string is unchanged.
]]
[[[^:['chars]]]
diff --git a/doc/jamfile.jam b/doc/jamfile.jam
index 22d67e62a..e61017d65 100644
--- a/doc/jamfile.jam
+++ b/doc/jamfile.jam
@@ -5,7 +5,7 @@
import quickbook ;
using boostbook ;
-project tools/build/v2/doc
+project tools/build/doc
;
boostbook userman : src/standalone.xml
diff --git a/doc/src/install.xml b/doc/src/install.xml
index 40c8b5ccb..8a272c73d 100644
--- a/doc/src/install.xml
+++ b/doc/src/install.xml
@@ -45,7 +45,7 @@
If you are not using a Boost.Build package, but rather the version
bundled with the Boost C++ Libraries, the above commands should be run
- in the tools/build/v2 directory.
+ in the tools/build directory.
Now that Boost.Build is installed, you can try some of the examples. Copy
@@ -97,7 +97,7 @@
Boost.Build release package, except for
jam_src directory. If you're using Boost CVS
to obtain Boost.Build, as opposed to release package, take
- everything from the tools/build/v2 directory.
+ everything from the tools/build directory.
For a check, make sure that
/usr/share/boost-build/boost-build.jam is installed.
diff --git a/doc/src/overview.xml b/doc/src/overview.xml
index 6dbb38a84..1c2310b96 100644
--- a/doc/src/overview.xml
+++ b/doc/src/overview.xml
@@ -728,7 +728,7 @@ b2 toolset=gcc variant=debug optimization=space
- Do no execute the commands, only print them.
+ Do not execute the commands, only print them.
@@ -775,7 +775,7 @@ b2 toolset=gcc variant=debug optimization=space
- Supress all informational messages.
+ Suppress all informational messages.
diff --git a/doc/src/reference.xml b/doc/src/reference.xml
index 9ac0bfa4d..e258985ee 100644
--- a/doc/src/reference.xml
+++ b/doc/src/reference.xml
@@ -126,12 +126,12 @@ rule check-target-builds ( target message ? : true-properties * : false-properti
This function can only be used when passing requirements or usage
requirements to a metatarget rule. For example, to make an application link
- to a library if it's avavailable, one has use the following:
+ to a library if it's available, one has to use the following:
exe app : app.cpp : [ check-target-builds has_foo "System has foo" : <library>foo : <define>FOO_MISSING=1 ] ;
- For another example, the alias rule can be used to consolidate configuraiton
+ For another example, the alias rule can be used to consolidate configuration
choices and make them available to other metatargets, like so:
alias foobar : : : : [ check-target-builds has_foo "System has foo" : <library>foo : <library>bar ] ;
@@ -221,7 +221,7 @@ ECHO [ glob-tree *.cpp : .svn ] ;
alwaysalways building a metatarget
- The always funciton takes a single
+ The always function takes a single
parameter—a list of metatarget names. The top-level targets produced
by the named metatargets will be always considered out of date. Consider this example:
@@ -366,7 +366,7 @@ path-constant DATA : data/a.txt ;
static
- A feature controling how libraries are built.
+ A feature controlling how libraries are built.
@@ -529,7 +529,7 @@ path-constant DATA : data/a.txt ;
Specify an additional directory where the system should
look for shared libraries when the executable or shared
library is run. This feature only affects Unix
- compilers. Plase see
+ compilers. Please see
in for details.
@@ -551,7 +551,7 @@ path-constant DATA : data/a.txt ;
As the result, the executable can be run without changing system
paths to shared libraries or installing the libraries to system
paths. This is very
- convenient during development. Plase see the FAQ entry for details. Note that on Mac
OSX, the paths are unconditionally hardcoded by the linker, and it
is not possible to disable that behaviour.
@@ -725,8 +725,27 @@ path-constant DATA : data/a.txt ;
architecture
+
+ Allowed values:
+ x86,
+ ia64,
+ sparc,
+ power,
+ mips1,
+ mips2,
+ mips3,
+ mips4,
+ mips32,
+ mips32r2,
+ mips64,
+ parisc,
+ arm,
+ combined,
+ combined-x86-power.
+
+
The architecture features specifies
- the general processor familty to generate code for.
+ the general processor family to generate code for.
@@ -1082,7 +1101,7 @@ using msvc : &toolset_ops; ;
script has been explicitly specified for the current target
platform. Used setup script will be passed the target platform
identifier (x86, x86_amd64, x86_ia64, amd64 or ia64) as a
- arameter. If not specified a default script is chosen based on the
+ parameter. If not specified a default script is chosen based on the
used compiler binary, e.g. vcvars32.bat or
vsvars32.bat.
@@ -1361,7 +1380,7 @@ using como-linux : &toolset_ops; ;
originally produced by Metrowerks and presently developed by
Freescale. Boost.Build supports only the versions of the compiler that
target x86 processors. All such versions were released by Metrowerks
- before aquisition and are not sold any longer. The last version known
+ before acquisition and are not sold any longer. The last version known
to work is 9.4.
The module is initialized using the following syntax:
@@ -1491,7 +1510,7 @@ using sun : &toolset_ops; ;
When using this compiler on complex C++ code, such as the
Boost C++ library, it is
- recommended to specify the following options when intializing the
+ recommended to specify the following options when initializing the
sun module:
-library=stlport4 -features=tmplife -features=tmplrefstatic
@@ -1547,7 +1566,7 @@ using vacpp ;
The STLport library
is an alternative implementation of C++ runtime library. Boost.Build
- supports using that library on Windows platfrom. Linux is
+ supports using that library on Windows platform. Linux is
hampered by different naming of libraries in each STLport
version and is not officially supported.
@@ -2213,8 +2232,8 @@ import path : native make : native-path make-path ;
If there's one viable alternative, it's choosen. Otherwise,
an attempt is made to find one best alternative. An alternative
- a is better than another alternative b, iff the set of properties
- in b's condition is a strict subset of the set of properities of
+ a is better than another alternative b, if and only if the set of properties
+ in b's condition is a strict subset of the set of properties of
'a's condition. If there's one viable alternative, which is
better than all others, it's selected. Otherwise, an error is
reported.
@@ -2254,7 +2273,7 @@ import path : native make : native-path make-path ;
request or requirements.If requirements include a conditional property, and
- condiiton of this property is true in context of common
+ condition of this property is true in context of common
properties, then the conditional property should be in common
properties as well.
@@ -2284,7 +2303,7 @@ exe a : a.cpp
Several factors determine the location of a concrete
file target. All files in a project are built under
- the directory bin unless this is overriden by the build-dir project
+ the directory bin unless this is overridden by the build-dir project
attribute. Under bin is a path that depends on the properties
used to build each target. This path is uniquely determined by
all non-free, non-incidental properties. For example,
@@ -2360,7 +2379,7 @@ exe a : a.cpp
subvalue1...subvalueN are legal values of some
of F's subfeatures. For example, the properties
<toolset>gcc <toolset-version>3.0.1 can be
- expressed more conscisely using a value-string, as
+ expressed more concisely using a value-string, as
<toolset>gcc-3.0.1.A property set is a set of properties (i.e. a
@@ -2434,7 +2453,7 @@ exe a : a.cpp
propagated
property, the build systems attempts to use the same property
when building any of its dependencies as part of that main
- target. For instance, when an optimized exectuable is
+ target. For instance, when an optimized executable is
requested, one usually wants it to be linked with optimized
libraries. Thus, the <optimization> feature is
propagated.
@@ -2476,7 +2495,7 @@ exe a : a.cpp
Normally a feature only generates a subvariant directory
when its value differs from its default value,
- leading to an assymmetric subvariant directory structure for
+ leading to an asymmetric subvariant directory structure for
certain values of the feature. A symmetric feature
always generates a corresponding
subvariant directory.
@@ -2670,7 +2689,7 @@ lib/b.cpp -- regular file
- It emphasises that projects and targets are different things.
+ It emphasizes that projects and targets are different things.
@@ -2686,7 +2705,7 @@ lib/b.cpp -- regular file
of targets, and it's reasonable to use them directly from other
project.
- 2. The rule for unpacking tar is inplemented in terms of
+ 2. The rule for unpacking tar is implemented in terms of
"patch-file", for maintainability, and therefore, must use main
target name that contains slashes?
diff --git a/doc/src/tutorial.xml b/doc/src/tutorial.xml
index e3cb41879..3227a6718 100644
--- a/doc/src/tutorial.xml
+++ b/doc/src/tutorial.xml
@@ -520,8 +520,8 @@ lib utils : utils.cpp /boost/filesystem//fs ;
lib core : core.cpp utils ;
exe app : app.cpp core ;
This works no matter what kind of linking is used. When core
- is built as a shared library, it is linked directly into
- utils. Static libraries can't link to other
+ is built as a shared library, utils is linked
+ directly into it. Static libraries can't link to other
libraries, so when core is built as a static
library, its dependency on utils is passed along to
core's dependents, causing app
diff --git a/example/build-id/Jamroot.jam b/example/build-id/Jamroot.jam
new file mode 100644
index 000000000..e69de29bb
diff --git a/example/built_tool/core/Jamfile.jam b/example/built_tool/core/Jamfile.jam
index 2d96f7182..d4ec62382 100644
--- a/example/built_tool/core/Jamfile.jam
+++ b/example/built_tool/core/Jamfile.jam
@@ -1,5 +1,6 @@
import toolset ;
+import os ;
project : requirements ../tblgen//tblgen ;
@@ -28,3 +29,11 @@ actions tblgen bind COMMAND
{
$(COMMAND:E=tblgen) > $(<)
}
+
+if [ os.name ] = VMS
+{
+ actions tblgen bind COMMAND
+ {
+ PIPE MCR $(COMMAND:WE=tblgen) > $(<:W)
+ }
+}
diff --git a/example/customization/inline_file.py b/example/customization/inline_file.py
index a48c5fc9d..9f13acd87 100644
--- a/example/customization/inline_file.py
+++ b/example/customization/inline_file.py
@@ -1,8 +1,8 @@
#!/usr/bin/python
-# Copyright 2003 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import sys
from string import strip
@@ -38,7 +38,7 @@ else:
file_to_include = sys.argv[2]
in_file = open(file_to_include, "r");
- variable_name = strip(in_file.readline())
+ variable_name = strip(in_file.readline())
out_file.write("extern const char %s[] = {\n%s};\n\n" % (variable_name, quote_file(in_file)))
in_file.close()
out_file.close()
diff --git a/example/customization/verbatim.jam b/example/customization/verbatim.jam
index 931fdce33..700aafb9a 100644
--- a/example/customization/verbatim.jam
+++ b/example/customization/verbatim.jam
@@ -8,6 +8,8 @@
# which are relevant to your case, remove everything else, and then change names
# and actions to taste.
+import os ;
+
# Declare a new target type. This allows Boost.Build to do something sensible
# when targets with the .verbatim extension are found in sources.
import type ;
@@ -49,3 +51,11 @@ actions inline-file
{
"./inline_file.py" $(<) $(>)
}
+
+if [ os.name ] = VMS
+{
+ actions inline-file
+ {
+ python inline_file.py $(<:W) $(>:W)
+ }
+}
diff --git a/example/customization/verbatim.py b/example/customization/verbatim.py
index be285976c..089bd3831 100644
--- a/example/customization/verbatim.py
+++ b/example/customization/verbatim.py
@@ -1,6 +1,6 @@
-# Copyright 2010 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2010 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This file is only used with Python port of Boost.Build
diff --git a/example/generator/soap.jam b/example/generator/soap.jam
index d28bfdecc..b3d9e7633 100644
--- a/example/generator/soap.jam
+++ b/example/generator/soap.jam
@@ -14,6 +14,7 @@ import generators ;
import feature ;
import common ;
import "class" : new ;
+import os ;
type.register GCI : gci ;
@@ -75,3 +76,11 @@ actions touch
{
$(TOUCH) $(<)
}
+
+if [ os.name ] = VMS
+{
+ actions touch
+ {
+ $(TOUCH) $(<:W)
+ }
+}
diff --git a/example/make/jamroot.jam b/example/make/jamroot.jam
index 7bb98e353..3f5ec5b56 100644
--- a/example/make/jamroot.jam
+++ b/example/make/jamroot.jam
@@ -1,5 +1,6 @@
import feature ;
import toolset ;
+import os ;
path-constant HERE : . ;
make main.cpp : main_cpp.pro : @do-something ;
@@ -11,3 +12,11 @@ actions do-something
{
"$(PYTHON:E=python)" "$(HERE)/foo.py" "$(>)" "$(<)"
}
+
+if [ os.name ] = VMS
+{
+ actions do-something
+ {
+ $(PYTHON:E=python) $(HERE:W)foo.py $(>:W) $(<:W)
+ }
+}
diff --git a/example/testing/compile-fail.cpp b/example/testing/compile-fail.cpp
index cd3e09409..a219fa5c6 100644
--- a/example/testing/compile-fail.cpp
+++ b/example/testing/compile-fail.cpp
@@ -8,9 +8,10 @@
//
#include
+#include
int main()
{
std::cout << "Bye!\n";
- return 1
+ return EXIT_FAILURE
}
diff --git a/example/testing/fail.cpp b/example/testing/fail.cpp
index f1efa1ee2..965661188 100644
--- a/example/testing/fail.cpp
+++ b/example/testing/fail.cpp
@@ -8,9 +8,10 @@
//
#include
+#include
int main()
{
std::cout << "Bye!\n";
- return 1;
+ return EXIT_FAILURE;
}
diff --git a/example/testing/success.cpp b/example/testing/success.cpp
index e2fa7a4a9..bf5588062 100644
--- a/example/testing/success.cpp
+++ b/example/testing/success.cpp
@@ -8,9 +8,10 @@
//
#include
+#include
int main()
{
std::cout << "Hi!\n";
- return 0;
+ return EXIT_SUCCESS;
}
diff --git a/src/build-system.jam b/src/build-system.jam
index 247326a96..76db2d377 100644
--- a/src/build-system.jam
+++ b/src/build-system.jam
@@ -555,6 +555,10 @@ local rule should-clean-project ( project )
{
default-toolset = msvc ;
}
+ else if [ os.name ] = VMS
+ {
+ default-toolset = vmsdecc ;
+ }
else if [ os.name ] = MACOSX
{
default-toolset = darwin ;
@@ -585,26 +589,6 @@ local rule should-clean-project ( project )
local properties = [ $(build-request).get-at 2 ] ;
- # Expand properties specified on the command line into multiple property
- # sets consisting of all legal property combinations. Each expanded property
- # set will be used for a single build run. E.g. if multiple toolsets are
- # specified then requested targets will be built with each of them.
- if $(properties)
- {
- expanded = [ build-request.expand-no-defaults $(properties) ] ;
- local xexpanded ;
- for local e in $(expanded)
- {
- xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
- }
- expanded = $(xexpanded) ;
- }
- else
- {
- expanded = [ property-set.empty ] ;
- }
-
-
# Check that we actually found something to build.
if ! $(current-project) && ! $(target-ids)
{
@@ -695,6 +679,29 @@ local rule should-clean-project ( project )
configure.set-log-file $(first-build-build-dir)/config.log ;
config-cache.load $(first-build-build-dir)/project-cache.jam ;
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ # The expansion is being performed as late as possible so that the feature
+ # validation is performed after all necessary modules (including project targets
+ # on the command line) have been loaded.
+ if $(properties)
+ {
+ expanded += [ build-request.convert-command-line-elements $(properties) ] ;
+ expanded = [ build-request.expand-no-defaults $(expanded) ] ;
+ local xexpanded ;
+ for local e in $(expanded)
+ {
+ xexpanded += [ property-set.create [ feature.split $(e) ] ] ;
+ }
+ expanded = $(xexpanded) ;
+ }
+ else
+ {
+ expanded = [ property-set.empty ] ;
+ }
+
# Now that we have a set of targets to build and a set of property sets to
# build the targets with, we can start the main build process by using each
# property set to generate virtual targets from all of our listed targets
diff --git a/src/build/ac.jam b/src/build/ac.jam
index 71bc16c37..c4bd6b7e1 100644
--- a/src/build/ac.jam
+++ b/src/build/ac.jam
@@ -16,6 +16,7 @@ import virtual-target ;
import generators ;
import property ;
import print ;
+import regex ;
project.initialize $(__name__) ;
.project = [ project.current ] ;
@@ -43,9 +44,13 @@ rule find-include-path ( properties : header : provided-path ? )
else
{
local a = [ class.new action : ac.generate-include : [ property-set.create $(header) ] ] ;
- local cpp = [ class.new file-target $(header).cpp exact : CPP : $(.project) : $(a) ] ;
+ # Create a new CPP target named after the header.
+ # Replace dots (".") in target basename for portability.
+ local basename = [ regex.replace $(header:D=) "[.]" "_" ] ;
+ local header-target = $(header:S=:B=$(basename)) ;
+ local cpp = [ class.new file-target $(header-target:S=.cpp) exact : CPP : $(.project) : $(a) ] ;
cpp = [ virtual-target.register $(cpp) ] ;
- local result = [ generators.construct $(.project) $(header) : OBJ : $(properties) : $(cpp) : true ] ;
+ local result = [ generators.construct $(.project) $(header-target) : OBJ : $(properties) : $(cpp) : true ] ;
local jam-targets ;
for t in $(result[2-])
{
diff --git a/src/build/alias.py b/src/build/alias.py
index 575e53609..e9078c746 100755
--- a/src/build/alias.py
+++ b/src/build/alias.py
@@ -1,13 +1,13 @@
-# Copyright 2003, 2004, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003, 2004, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Status: ported (danielw)
# Base revision: 56043
# This module defines the 'alias' rule and associated class.
#
-# Alias is just a main target which returns its source targets without any
+# Alias is just a main target which returns its source targets without any
# processing. For example::
#
# alias bin : hello test_hello ;
@@ -18,7 +18,7 @@
# alias platform-src : win.cpp : NT ;
# alias platform-src : linux.cpp : LINUX ;
# exe main : main.cpp platform-src ;
-#
+#
# Lastly, it's possible to create local alias for some target, with different
# properties::
#
@@ -29,7 +29,7 @@ import targets
import property_set
from b2.manager import get_manager
-from b2.util import metatarget
+from b2.util import metatarget, is_iterable_typed
class AliasTarget(targets.BasicTarget):
@@ -37,9 +37,17 @@ class AliasTarget(targets.BasicTarget):
targets.BasicTarget.__init__(self, *args)
def construct(self, name, source_targets, properties):
+ if __debug__:
+ from .virtual_target import VirtualTarget
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(source_targets, VirtualTarget)
+ assert isinstance(properties, property_set.PropertySet)
return [property_set.empty(), source_targets]
def compute_usage_requirements(self, subvariant):
+ if __debug__:
+ from .virtual_target import Subvariant
+ assert isinstance(subvariant, Subvariant)
base = targets.BasicTarget.compute_usage_requirements(self, subvariant)
# Add source's usage requirement. If we don't do this, "alias" does not
# look like 100% alias.
@@ -47,7 +55,11 @@ class AliasTarget(targets.BasicTarget):
@metatarget
def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
-
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
project = get_manager().projects().current()
targets = get_manager().targets()
diff --git a/src/build/build-request.jam b/src/build/build-request.jam
index 2a1bbb467..3110713b7 100644
--- a/src/build/build-request.jam
+++ b/src/build/build-request.jam
@@ -150,8 +150,7 @@ rule from-command-line ( command-line * )
if [ MATCH "(.*=.*)" : $(e) ]
|| [ looks-like-implicit-value $(e:D=) : $(feature-space) ]
{
- properties += [ convert-command-line-element $(e) :
- $(feature-space) ] ;
+ properties += $(e) ;
}
else if $(e)
{
@@ -169,9 +168,22 @@ rule from-command-line ( command-line * )
}
-# Converts one element of command line build request specification into internal
+# Converts a list of elements of command line build request specification into internal
# form. Expects all the project files to already be loaded.
#
+rule convert-command-line-elements ( elements * )
+{
+ local result ;
+ for local e in $(elements)
+ {
+ result += [ convert-command-line-element $(e) ] ;
+ }
+ return $(result) ;
+}
+
+
+# Converts one element of command line build request specification into internal
+# form.
local rule convert-command-line-element ( e )
{
local result ;
@@ -286,37 +298,60 @@ rule __test__ ( )
local r ;
- r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
- assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug dynamic ;
-
try ;
{
- build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
+ r = [ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ] ;
+ build-request.convert-command-line-elements [ $(r).get-at 2 ] ;
}
catch \"static\" is not an implicit feature value ;
+ r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+ assert.equal [ $(r).get-at 1 ] : ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic ]
+ : debug dynamic ;
+
r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
assert.equal [ $(r).get-at 1 ] : target ;
- assert.equal [ $(r).get-at 2 ] : debug dynamic ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic ]
+ : debug dynamic ;
r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug dynamic static ;
+ assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic,static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug runtime-link=dynamic,static ]
+ : debug dynamic static ;
r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : debug gcc/dynamic
- gcc/static ;
+ assert.equal [ $(r).get-at 2 ] : debug gcc/runtime-link=dynamic,static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements debug gcc/runtime-link=dynamic,static ]
+ : debug gcc/dynamic gcc/static ;
r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
assert.equal [ $(r).get-at 1 ] : ;
- assert.equal [ $(r).get-at 2 ] : msvc gcc/static
- borland/static ;
+ assert.equal [ $(r).get-at 2 ] : msvc gcc,borland/runtime-link=static ;
+
+ assert.equal
+ [ build-request.convert-command-line-elements msvc gcc,borland/runtime-link=static ]
+ : msvc gcc/static borland/static ;
r = [ build-request.from-command-line bjam gcc-3.0 ] ;
assert.equal [ $(r).get-at 1 ] : ;
assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
+ assert.equal
+ [ build-request.convert-command-line-elements gcc-3.0 ]
+ : gcc-3.0 ;
+
feature.finish-test build-request-test-temp ;
}
diff --git a/src/build/build_request.py b/src/build/build_request.py
index 118033e1e..194251688 100644
--- a/src/build/build_request.py
+++ b/src/build/build_request.py
@@ -11,18 +11,20 @@ import b2.build.feature
feature = b2.build.feature
from b2.util.utility import *
+from b2.util import is_iterable_typed
import b2.build.property_set as property_set
def expand_no_defaults (property_sets):
""" Expand the given build request by combining all property_sets which don't
specify conflicting non-free features.
"""
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
# First make all features and subfeatures explicit
expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets]
-
+
# Now combine all of the expanded property_sets
product = __x_product (expanded_property_sets)
-
+
return [property_set.create(p) for p in product]
@@ -30,6 +32,7 @@ def __x_product (property_sets):
""" Return the cross-product of all elements of property_sets, less any
that would contain conflicting values for single-valued features.
"""
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
x_product_seen = set()
return __x_product_aux (property_sets, x_product_seen)[0]
@@ -42,8 +45,10 @@ def __x_product_aux (property_sets, seen_features):
Returns a tuple of:
- list of lists of Property instances, such that within each list, no two Property instance
have the same feature, and no Property is for feature in seen_features.
- - set of features we saw in property_sets
+ - set of features we saw in property_sets
"""
+ assert is_iterable_typed(property_sets, property_set.PropertySet)
+ assert isinstance(seen_features, set)
if not property_sets:
return ([], set())
@@ -76,7 +81,7 @@ def __x_product_aux (property_sets, seen_features):
result.append(properties + inner)
else:
result.append(properties)
-
+
if inner_seen & these_features:
# Some of elements in property_sets[1:] conflict with elements of property_sets[0],
# Try again, this time omitting elements of property_sets[0]
@@ -85,11 +90,12 @@ def __x_product_aux (property_sets, seen_features):
return (result, inner_seen | these_features)
-
+
def looks_like_implicit_value(v):
"""Returns true if 'v' is either implicit value, or
the part before the first '-' symbol is implicit value."""
+ assert isinstance(v, basestring)
if feature.is_implicit_value(v):
return 1
else:
@@ -104,7 +110,7 @@ def from_command_line(command_line):
and constructs build request from it. Returns a list of two
lists. First is the set of targets specified in the command line,
and second is the set of requested build properties."""
-
+ assert is_iterable_typed(command_line, basestring)
targets = []
properties = []
@@ -112,17 +118,17 @@ def from_command_line(command_line):
if e[:1] != "-":
# Build request spec either has "=" in it, or completely
# consists of implicit feature values.
- if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
- properties += convert_command_line_element(e)
+ if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]):
+ properties.append(e)
elif e:
targets.append(e)
return [targets, properties]
-
+
# Converts one element of command line build request specification into
# internal form.
def convert_command_line_element(e):
-
+ assert isinstance(e, basestring)
result = None
parts = e.split("/")
for p in parts:
@@ -133,7 +139,7 @@ def convert_command_line_element(e):
lresult = [("<%s>%s" % (feature, v)) for v in values]
else:
lresult = p.split(",")
-
+
if p.find('-') == -1:
# FIXME: first port property.validate
# property.validate cannot handle subfeatures,
@@ -149,68 +155,68 @@ def convert_command_line_element(e):
return [property_set.create(b2.build.feature.split(r)) for r in result]
-###
+###
### rule __test__ ( )
### {
### import assert feature ;
-###
+###
### feature.prepare-test build-request-test-temp ;
-###
+###
### import build-request ;
### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
### import errors : try catch ;
### import feature : feature subfeature ;
-###
+###
### feature toolset : gcc msvc borland : implicit ;
### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
### 3.0 3.0.1 3.0.2 : optional ;
-###
+###
### feature variant : debug release : implicit composite ;
### feature inlining : on off ;
### feature "include" : : free ;
-###
+###
### feature stdlib : native stlport : implicit ;
-###
+###
### feature runtime-link : dynamic static : symmetric ;
-###
-###
+###
+###
### local r ;
-###
-### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
+###
+### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug dynamic ;
-###
+###
### try ;
### {
-###
+###
### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
### }
### catch \"static\" is not a value of an implicit feature ;
-###
-###
+###
+###
### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
### assert.equal [ $(r).get-at 1 ] : target ;
### assert.equal [ $(r).get-at 2 ] : debug dynamic ;
-###
+###
### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug dynamic static ;
-###
+###
### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : debug gcc/dynamic
+### assert.equal [ $(r).get-at 2 ] : debug gcc/dynamic
### gcc/static ;
-###
+###
### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
-### assert.equal [ $(r).get-at 2 ] : msvc gcc/static
+### assert.equal [ $(r).get-at 2 ] : msvc gcc/static
### borland/static ;
-###
+###
### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
-###
+###
### feature.finish-test build-request-test-temp ;
### }
-###
-###
+###
+###
diff --git a/src/build/config-cache.jam b/src/build/config-cache.jam
index 5297dbb84..ff69ff6f6 100644
--- a/src/build/config-cache.jam
+++ b/src/build/config-cache.jam
@@ -7,6 +7,7 @@ import errors ;
import regex ;
import path ;
import project ;
+import os ;
rule get ( name )
{
@@ -49,6 +50,14 @@ actions write
@($(STDOUT):E=$(FILE_CONTENTS:J=)) > "$(<)"
}
+if [ os.name ] = VMS
+{
+ actions write
+ {
+ @($(STDOUT):E=$(FILE_CONTENTS:J=)) | TYPE SYS$INPUT /OUT=$(<:W)
+ }
+}
+
rule load ( cache-file )
{
if $(.cache-file)
diff --git a/src/build/configure.py b/src/build/configure.py
index 0426832c4..10afb8209 100644
--- a/src/build/configure.py
+++ b/src/build/configure.py
@@ -10,17 +10,17 @@
# This module defines function to help with two main tasks:
#
# - Discovering build-time configuration for the purposes of adjusting
-# build process.
+# build process.
# - Reporting what is built, and how it is configured.
import b2.build.property as property
import b2.build.property_set as property_set
-import b2.build.targets
+from b2.build import targets as targets_
from b2.manager import get_manager
from b2.util.sequence import unique
-from b2.util import bjam_signature, value_to_jam
+from b2.util import bjam_signature, value_to_jam, is_iterable
import bjam
import os
@@ -41,17 +41,22 @@ __log_fd = -1
def register_components(components):
"""Declare that the components specified by the parameter exist."""
+ assert is_iterable(components)
__components.extend(components)
-
+
def components_building(components):
"""Declare that the components specified by the parameters will be build."""
+ assert is_iterable(components)
__built_components.extend(components)
def log_component_configuration(component, message):
"""Report something about component configuration that the user should better know."""
+ assert isinstance(component, basestring)
+ assert isinstance(message, basestring)
__component_logs.setdefault(component, []).append(message)
def log_check_result(result):
+ assert isinstance(result, basestring)
global __announced_checks
if not __announced_checks:
print "Performing configuration checks"
@@ -60,7 +65,9 @@ def log_check_result(result):
print result
def log_library_search_result(library, result):
- log_check_result((" - %(library)s : %(result)s" % locals()).rjust(width))
+ assert isinstance(library, basestring)
+ assert isinstance(result, basestring)
+ log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width))
def print_component_configuration():
@@ -84,6 +91,10 @@ def builds(metatarget_reference, project, ps, what):
# Attempt to build a metatarget named by 'metatarget-reference'
# in context of 'project' with properties 'ps'.
# Returns non-empty value if build is OK.
+ assert isinstance(metatarget_reference, basestring)
+ assert isinstance(project, targets_.ProjectTarget)
+ assert isinstance(ps, property_set.PropertySet)
+ assert isinstance(what, basestring)
result = []
@@ -93,12 +104,12 @@ def builds(metatarget_reference, project, ps, what):
result = False
__builds_cache[(what, ps)] = False
- targets = b2.build.targets.generate_from_reference(
+ targets = targets_.generate_from_reference(
metatarget_reference, project, ps).targets()
jam_targets = []
for t in targets:
jam_targets.append(t.actualize())
-
+
x = (" - %s" % what).rjust(__width)
if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"):
__builds_cache[(what, ps)] = True
@@ -112,6 +123,7 @@ def builds(metatarget_reference, project, ps, what):
return existing
def set_log_file(log_file_name):
+ assert isinstance(log_file_name, basestring)
# Called by Boost.Build startup code to specify name of a file
# that will receive results of configure checks. This
# should never be called by users.
@@ -134,7 +146,7 @@ class CheckTargetBuildsWorker:
self.false_properties = property.create_from_strings(false_properties, True)
def check(self, ps):
-
+ assert isinstance(ps, property_set.PropertySet)
# FIXME: this should not be hardcoded. Other checks might
# want to consider different set of features as relevant.
toolset = ps.get('toolset')[0]
@@ -146,7 +158,7 @@ class CheckTargetBuildsWorker:
ps.get_properties("architecture")
rps = property_set.create(relevant)
t = get_manager().targets().current()
- p = t.project()
+ p = t.project()
if builds(self.target, p, rps, "%s builds" % self.target):
choosen = self.true_properties
else:
diff --git a/src/build/engine.py b/src/build/engine.py
index 35333eaa0..4c2c97eaf 100644
--- a/src/build/engine.py
+++ b/src/build/engine.py
@@ -10,46 +10,50 @@ import operator
import re
import b2.build.property_set as property_set
-import b2.util
-class BjamAction:
+from b2.util import set_jam_action, is_iterable
+
+class BjamAction(object):
"""Class representing bjam action defined from Python."""
-
+
def __init__(self, action_name, function):
+ assert isinstance(action_name, basestring)
+ assert callable(function) or function is None
self.action_name = action_name
self.function = function
-
- def __call__(self, targets, sources, property_set):
+ def __call__(self, targets, sources, property_set_):
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
# Bjam actions defined from Python have only the command
# to execute, and no associated jam procedural code. So
# passing 'property_set' to it is not necessary.
bjam_interface.call("set-update-action", self.action_name,
targets, sources, [])
if self.function:
- self.function(targets, sources, property_set)
+ self.function(targets, sources, property_set_)
-class BjamNativeAction:
+class BjamNativeAction(BjamAction):
"""Class representing bjam action defined by Jam code.
We still allow to associate a Python callable that will
be called when this action is installed on any target.
"""
-
- def __init__(self, action_name, function):
- self.action_name = action_name
- self.function = function
-
- def __call__(self, targets, sources, property_set):
+
+ def __call__(self, targets, sources, property_set_):
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
if self.function:
- self.function(targets, sources, property_set)
-
+ self.function(targets, sources, property_set_)
+
p = []
if property_set:
- p = property_set.raw()
+ p = property_set_.raw()
+
+ set_jam_action(self.action_name, targets, sources, p)
- b2.util.set_jam_action(self.action_name, targets, sources, p)
-
action_modifiers = {"updated": 0x01,
"together": 0x02,
"ignore": 0x04,
@@ -77,6 +81,8 @@ class Engine:
targets = [targets]
if isinstance (sources, str):
sources = [sources]
+ assert is_iterable(targets)
+ assert is_iterable(sources)
for target in targets:
for source in sources:
@@ -105,6 +111,11 @@ class Engine:
echo [ on $(targets) return $(MY-VAR) ] ;
"Hello World"
"""
+ if isinstance(targets, str):
+ targets = [targets]
+ assert is_iterable(targets)
+ assert isinstance(variable, basestring)
+
return bjam_interface.call('get-target-variable', targets, variable)
def set_target_variable (self, targets, variable, value, append=0):
@@ -114,13 +125,19 @@ class Engine:
where to generate targets, and will also be available to
updating rule for that 'taret'.
"""
- if isinstance (targets, str):
+ if isinstance (targets, str):
targets = [targets]
+ if isinstance(value, str):
+ value = [value]
+
+ assert is_iterable(targets)
+ assert isinstance(variable, basestring)
+ assert is_iterable(value)
for target in targets:
self.do_set_target_variable (target, variable, value, append)
- def set_update_action (self, action_name, targets, sources, properties=property_set.empty()):
+ def set_update_action (self, action_name, targets, sources, properties=None):
""" Binds a target to the corresponding update action.
If target needs to be updated, the action registered
with action_name will be used.
@@ -128,9 +145,17 @@ class Engine:
either 'register_action' or 'register_bjam_action'
method.
"""
- assert(isinstance(properties, property_set.PropertySet))
- if isinstance (targets, str):
+ if isinstance(targets, str):
targets = [targets]
+ if isinstance(sources, str):
+ sources = [sources]
+ if properties is None:
+ properties = property_set.empty()
+ assert isinstance(action_name, basestring)
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert(isinstance(properties, property_set.PropertySet))
+
self.do_set_update_action (action_name, targets, sources, properties)
def register_action (self, action_name, command, bound_list = [], flags = [],
@@ -149,10 +174,11 @@ class Engine:
This function will be called by set_update_action, and can
set additional target variables.
"""
- if self.actions.has_key(action_name):
- raise "Bjam action %s is already defined" % action_name
-
- assert(isinstance(flags, list))
+ assert isinstance(action_name, basestring)
+ assert isinstance(command, basestring)
+ assert is_iterable(bound_list)
+ assert is_iterable(flags)
+ assert function is None or callable(function)
bjam_flags = reduce(operator.or_,
(action_modifiers[flag] for flag in flags), 0)
@@ -178,25 +204,37 @@ class Engine:
# action name. This way, jamfile rules that take action names
# can just register them without specially checking if
# action is already registered.
+ assert isinstance(action_name, basestring)
+ assert function is None or callable(function)
if not self.actions.has_key(action_name):
self.actions[action_name] = BjamNativeAction(action_name, function)
-
+
# Overridables
- def do_set_update_action (self, action_name, targets, sources, property_set):
+ def do_set_update_action (self, action_name, targets, sources, property_set_):
+ assert isinstance(action_name, basestring)
+ assert is_iterable(targets)
+ assert is_iterable(sources)
+ assert isinstance(property_set_, property_set.PropertySet)
action = self.actions.get(action_name)
if not action:
raise Exception("No action %s was registered" % action_name)
- action(targets, sources, property_set)
+ action(targets, sources, property_set_)
def do_set_target_variable (self, target, variable, value, append):
+ assert isinstance(target, basestring)
+ assert isinstance(variable, basestring)
+ assert is_iterable(value)
+ assert isinstance(append, int) # matches bools
if append:
bjam_interface.call("set-target-variable", target, variable, value, "true")
else:
bjam_interface.call("set-target-variable", target, variable, value)
-
+
def do_add_dependency (self, target, source):
+ assert isinstance(target, basestring)
+ assert isinstance(source, basestring)
bjam_interface.call("DEPENDS", target, source)
-
-
+
+
diff --git a/src/build/errors.py b/src/build/errors.py
index d9dceefe0..69d8a37d3 100644
--- a/src/build/errors.py
+++ b/src/build/errors.py
@@ -1,8 +1,8 @@
# Status: being written afresh by Vladimir Prus
-# Copyright 2007 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2007 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This file is supposed to implement error reporting for Boost.Build.
# Experience with jam version has shown that printing full backtrace
@@ -22,7 +22,7 @@ import sys
def format(message, prefix=""):
parts = str(message).split("\n")
return "\n".join(prefix+p for p in parts)
-
+
class Context:
@@ -71,7 +71,7 @@ class ExceptionWithUserContext(Exception):
traceback.print_tb(self.original_tb_)
elif self.stack_:
for l in traceback.format_list(self.stack_):
- print l,
+ print l,
else:
print " use the '--stacktrace' option to get Python stacktrace"
print
@@ -87,9 +87,9 @@ def user_error_checkpoint(callable):
errors.handle_stray_exception(e)
finally:
errors.pop_user_context()
-
+
return wrapper
-
+
class Errors:
def __init__(self):
@@ -116,12 +116,12 @@ class Errors:
def handle_stray_exception(self, e):
raise ExceptionWithUserContext("unexpected exception", self.contexts_[:],
- e, sys.exc_info()[2])
+ e, sys.exc_info()[2])
def __call__(self, message):
self._count = self._count + 1
- raise ExceptionWithUserContext(message, self.contexts_[:],
+ raise ExceptionWithUserContext(message, self.contexts_[:],
stack=traceback.extract_stack())
-
-
+
+
diff --git a/src/build/feature.jam b/src/build/feature.jam
index ee6abc591..e58edcbed 100644
--- a/src/build/feature.jam
+++ b/src/build/feature.jam
@@ -30,6 +30,7 @@ local rule setup ( )
link-incompatible
subfeature
order-sensitive
+ hidden
;
.all-features = ;
diff --git a/src/build/feature.py b/src/build/feature.py
index 827dae340..6cf81a1a1 100644
--- a/src/build/feature.py
+++ b/src/build/feature.py
@@ -1,15 +1,15 @@
# Status: ported, except for unit tests.
# Base revision: 64488
#
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2002, 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2002, 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import re
-from b2.util import utility, bjam_signature
+from b2.util import utility, bjam_signature, is_iterable_typed
import b2.util.set
from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
from b2.exceptions import *
@@ -25,6 +25,9 @@ class Feature(object):
_attribute_name_to_integer = {}
def __init__(self, name, values, attributes):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(values, basestring)
+ assert is_iterable_typed(attributes, basestring)
self._name = name
self._values = values
self._default = None
@@ -42,12 +45,19 @@ class Feature(object):
return self._values
def add_values(self, values):
+ assert is_iterable_typed(values, basestring)
self._values.extend(values)
def attributes(self):
return self._attributes
def set_default(self, value):
+ assert isinstance(value, basestring)
+ for attr in ('free', 'optional'):
+ if getattr(self, attr)():
+ get_manager().errors()('"{}" feature "<{}>" cannot have a default value.'
+ .format(attr, self._name))
+
self._default = value
def default(self):
@@ -61,6 +71,7 @@ class Feature(object):
return self._subfeatures
def add_subfeature(self, name):
+ assert isinstance(name, Feature)
self._subfeatures.append(name)
def parent(self):
@@ -72,19 +83,21 @@ class Feature(object):
return self._parent
def set_parent(self, feature, value):
+ assert isinstance(feature, Feature)
+ assert isinstance(value, basestring)
self._parent = (feature, value)
def __str__(self):
return self._name
-
+
def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __all_attributes, __all_features, __implicit_features, __composite_properties
global __features_with_attributes, __subfeature_from_value, __all_top_features, __free_features
global __all_subfeatures
-
+
# The list with all attribute names.
__all_attributes = [ 'implicit',
'composite',
@@ -107,28 +120,28 @@ def reset ():
return getattr(self, "_attributes") & flag
setattr(Feature, a.replace("-", "_"), probe)
i = i << 1
-
+
# A map containing all features. The key is the feature name.
# The value is an instance of Feature class.
__all_features = {}
-
+
# All non-subfeatures.
__all_top_features = []
-
+
# Maps valus to the corresponding implicit feature
__implicit_features = {}
-
+
# A map containing all composite properties. The key is a Property instance,
# and the value is a list of Property instances
__composite_properties = {}
-
+
__features_with_attributes = {}
for attribute in __all_attributes:
__features_with_attributes [attribute] = []
-
+
# Maps a value to the corresponding subfeature name.
__subfeature_from_value = {}
-
+
# All free features
__free_features = []
@@ -146,6 +159,7 @@ def get(name):
Throws if no feature by such name exists
"""
+ assert isinstance(name, basestring)
return __all_features[name]
# FIXME: prepare-test/finish-test?
@@ -163,12 +177,12 @@ def feature (name, values, attributes = []):
__all_features[name] = feature
# Temporary measure while we have not fully moved from 'gristed strings'
__all_features["<" + name + ">"] = feature
-
+
for attribute in attributes:
__features_with_attributes [attribute].append (name)
name = add_grist(name)
-
+
if 'subfeature' in attributes:
__all_subfeatures.append(name)
else:
@@ -208,9 +222,10 @@ def set_default (feature, value):
def defaults(features):
""" Returns the default property values for the given features.
"""
+ assert is_iterable_typed(features, Feature)
# FIXME: should merge feature and property modules.
- import property
-
+ from . import property
+
result = []
for f in features:
if not f.free() and not f.optional() and f.default():
@@ -221,21 +236,22 @@ def defaults(features):
def valid (names):
""" Returns true iff all elements of names are valid features.
"""
- def valid_one (name): return __all_features.has_key (name)
-
- if isinstance (names, str):
- return valid_one (names)
- else:
- return all([ valid_one (name) for name in names ])
+ if isinstance(names, str):
+ names = [names]
+ assert is_iterable_typed(names, basestring)
+
+ return all(name in __all_features for name in names)
def attributes (feature):
""" Returns the attributes of the given feature.
"""
+ assert isinstance(feature, basestring)
return __all_features[feature].attributes_string_list()
-
+
def values (feature):
""" Return the values of the given feature.
"""
+ assert isinstance(feature, basestring)
validate_feature (feature)
return __all_features[feature].values()
@@ -243,43 +259,43 @@ def is_implicit_value (value_string):
""" Returns true iff 'value_string' is a value_string
of an implicit feature.
"""
-
+ assert isinstance(value_string, basestring)
if __implicit_features.has_key(value_string):
return __implicit_features[value_string]
-
+
v = value_string.split('-')
if not __implicit_features.has_key(v[0]):
return False
feature = __implicit_features[v[0]]
-
+
for subvalue in (v[1:]):
if not __find_implied_subfeature(feature, subvalue, v[0]):
return False
-
+
return True
def implied_feature (implicit_value):
""" Returns the implicit feature associated with the given implicit value.
"""
+ assert isinstance(implicit_value, basestring)
components = implicit_value.split('-')
-
+
if not __implicit_features.has_key(components[0]):
raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value)
-
+
return __implicit_features[components[0]]
def __find_implied_subfeature (feature, subvalue, value_string):
-
- #if value_string == None: value_string = ''
+ assert isinstance(feature, Feature)
+ assert isinstance(subvalue, basestring)
+ assert isinstance(value_string, basestring)
- if not __subfeature_from_value.has_key(feature) \
- or not __subfeature_from_value[feature].has_key(value_string) \
- or not __subfeature_from_value[feature][value_string].has_key (subvalue):
+ try:
+ return __subfeature_from_value[feature][value_string][subvalue]
+ except KeyError:
return None
-
- return __subfeature_from_value[feature][value_string][subvalue]
# Given a feature and a value of one of its subfeatures, find the name
# of the subfeature. If value-string is supplied, looks for implied
@@ -289,6 +305,9 @@ def __find_implied_subfeature (feature, subvalue, value_string):
# value-string # The value of the main feature
def implied_subfeature (feature, subvalue, value_string):
+ assert isinstance(feature, Feature)
+ assert isinstance(subvalue, basestring)
+ assert isinstance(value_string, basestring)
result = __find_implied_subfeature (feature, subvalue, value_string)
if not result:
raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
@@ -298,28 +317,19 @@ def implied_subfeature (feature, subvalue, value_string):
def validate_feature (name):
""" Checks if all name is a valid feature. Otherwise, raises an exception.
"""
+ assert isinstance(name, basestring)
if not __all_features.has_key(name):
raise InvalidFeature ("'%s' is not a valid feature name" % name)
else:
return __all_features[name]
-def valid (names):
- """ Returns true iff all elements of names are valid features.
- """
- def valid_one (name): return __all_features.has_key (name)
-
- if isinstance (names, str):
- return valid_one (names)
- else:
- return [ valid_one (name) for name in names ]
-
# Uses Property
-def __expand_subfeatures_aux (property, dont_validate = False):
+def __expand_subfeatures_aux (property_, dont_validate = False):
""" Helper for expand_subfeatures.
Given a feature and value, or just a value corresponding to an
implicit feature, returns a property set consisting of all component
subfeatures and their values. For example:
-
+
expand_subfeatures gcc-2.95.2-linux-x86
-> gcc 2.95.2 linux x86
equivalent to:
@@ -329,33 +339,35 @@ def __expand_subfeatures_aux (property, dont_validate = False):
value: The value of the feature.
dont_validate: If True, no validation of value string will be done.
"""
- f = property.feature()
- v = property.value()
+ from . import property # no __debug__ since Property is used elsewhere
+ assert isinstance(property_, property.Property)
+ assert isinstance(dont_validate, int) # matches bools
+
+ f = property_.feature()
+ v = property_.value()
if not dont_validate:
validate_value_string(f, v)
components = v.split ("-")
-
+
v = components[0]
- import property
+ result = [property.Property(f, components[0])]
- result = [property.Property(f, components[0])]
-
subvalues = components[1:]
while len(subvalues) > 0:
subvalue = subvalues [0] # pop the head off of subvalues
subvalues = subvalues [1:]
-
+
subfeature = __find_implied_subfeature (f, subvalue, v)
-
+
# If no subfeature was found, reconstitute the value string and use that
if not subfeature:
return [property.Property(f, '-'.join(components))]
-
+
result.append(property.Property(subfeature, subvalue))
-
+
return result
def expand_subfeatures(properties, dont_validate = False):
@@ -363,11 +375,11 @@ def expand_subfeatures(properties, dont_validate = False):
Make all elements of properties corresponding to implicit features
explicit, and express all subfeature values as separate properties
in their own right. For example, the property
-
+
gcc-2.95.2-linux-x86
-
+
might expand to
-
+
gcc 2.95.2 linux x86
properties: A sequence with elements of the form
@@ -375,6 +387,10 @@ def expand_subfeatures(properties, dont_validate = False):
case of implicit features.
: dont_validate: If True, no validation of value string will be done.
"""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(dont_validate, int) # matches bools
result = []
for p in properties:
# Don't expand subfeatures in subfeatures
@@ -408,6 +424,8 @@ def expand_subfeatures(properties, dont_validate = False):
def extend (name, values):
""" Adds the given values to the given feature.
"""
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(values, basestring)
name = add_grist (name)
__validate_feature (name)
feature = __all_features [name]
@@ -419,7 +437,7 @@ def extend (name, values):
__implicit_features[v] = feature
- if len (feature.values()) == 0 and len (values) > 0:
+ if values and not feature.values() and not(feature.free() or feature.optional()):
# This is the first value specified for this feature,
# take it as default value
feature.set_default(values[0])
@@ -429,6 +447,8 @@ def extend (name, values):
def validate_value_string (f, value_string):
""" Checks that value-string is a valid value-string for the given feature.
"""
+ assert isinstance(f, Feature)
+ assert isinstance(value_string, basestring)
if f.free() or value_string in f.values():
return
@@ -453,39 +473,42 @@ def validate_value_string (f, value_string):
value-string is provided, the subvalues are only valid for the given
value of the feature. Thus, you could say that
mingw is specifc to gcc-2.95.2 as follows:
-
+
extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
feature: The feature whose subfeature is being extended.
-
+
value-string: If supplied, specifies a specific value of the
main feature for which the new subfeature values
are valid.
-
+
subfeature: The name of the subfeature.
-
+
subvalues: The additional values of the subfeature being defined.
"""
def extend_subfeature (feature_name, value_string, subfeature_name, subvalues):
-
+ assert isinstance(feature_name, basestring)
+ assert isinstance(value_string, basestring)
+ assert isinstance(subfeature_name, basestring)
+ assert is_iterable_typed(subvalues, basestring)
feature = validate_feature(feature_name)
-
+
if value_string:
validate_value_string(feature, value_string)
subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string)
-
+
extend(subfeature_name, subvalues) ;
subfeature = __all_features[subfeature_name]
if value_string == None: value_string = ''
-
+
if not __subfeature_from_value.has_key(feature):
__subfeature_from_value [feature] = {}
-
+
if not __subfeature_from_value[feature].has_key(value_string):
__subfeature_from_value [feature][value_string] = {}
-
+
for subvalue in subvalues:
__subfeature_from_value [feature][value_string][subvalue] = subfeature
@@ -496,16 +519,16 @@ def subfeature (feature_name, value_string, subfeature, subvalues, attributes =
feature_name: Root feature that is not a subfeature.
value_string: An optional value-string specifying which feature or
subfeature values this subfeature is specific to,
- if any.
+ if any.
subfeature: The name of the subfeature being declared.
subvalues: The allowed values of this subfeature.
attributes: The attributes of the subfeature.
"""
parent_feature = validate_feature (feature_name)
-
+
# Add grist to the subfeature name if a value-string was supplied
subfeature_name = __get_subfeature_name (subfeature, value_string)
-
+
if subfeature_name in __all_features[feature_name].subfeatures():
message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
message += " specific to '%s'" % value_string
@@ -514,7 +537,7 @@ def subfeature (feature_name, value_string, subfeature, subvalues, attributes =
# First declare the subfeature as a feature in its own right
f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature'])
f.set_parent(parent_feature, value_string)
-
+
parent_feature.add_subfeature(f)
# Now make sure the subfeature values are known.
@@ -527,7 +550,7 @@ def compose (composite_property_s, component_properties_s):
All parameters are value strings
"""
- import property
+ from . import property
component_properties_s = to_seq (component_properties_s)
composite_property = property.create_from_string(composite_property_s)
@@ -537,7 +560,7 @@ def compose (composite_property_s, component_properties_s):
component_properties = component_properties_s
else:
component_properties = [property.create_from_string(p) for p in component_properties_s]
-
+
if not f.composite():
raise BaseException ("'%s' is not a composite feature" % f)
@@ -550,10 +573,13 @@ def compose (composite_property_s, component_properties_s):
__composite_properties[composite_property] = component_properties
-def expand_composite(property):
- result = [ property ]
- if __composite_properties.has_key(property):
- for p in __composite_properties[property]:
+def expand_composite(property_):
+ if __debug__:
+ from .property import Property
+ assert isinstance(property_, Property)
+ result = [ property_ ]
+ if __composite_properties.has_key(property_):
+ for p in __composite_properties[property_]:
result.extend(expand_composite(p))
return result
@@ -567,7 +593,7 @@ def get_values (feature, properties):
for p in properties:
if get_grist (p) == feature:
result.append (replace_grist (p, ''))
-
+
return result
def free_features ():
@@ -579,6 +605,9 @@ def expand_composites (properties):
""" Expand all composite properties in the set so that all components
are explicitly expressed.
"""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
explicit_features = set(p.feature() for p in properties)
result = []
@@ -604,7 +633,7 @@ def expand_composites (properties):
result.append (x)
elif any(r.feature() == f for r in result):
raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
- "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
+ "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
[r.value() for r in result if r.feature() == f], p, x.value()))
else:
result.append (x)
@@ -617,6 +646,11 @@ def is_subfeature_of (parent_property, f):
feature, or if f is a subfeature of the parent_property's feature
specific to the parent_property's value.
"""
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert isinstance(f, Feature)
+
if not f.subfeature():
return False
@@ -638,38 +672,27 @@ def is_subfeature_of (parent_property, f):
def __is_subproperty_of (parent_property, p):
""" As is_subfeature_of, for subproperties.
"""
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert isinstance(p, Property)
return is_subfeature_of (parent_property, p.feature())
-
+
# Returns true iff the subvalue is valid for the feature. When the
# optional value-string is provided, returns true iff the subvalues
# are valid for the given value of the feature.
def is_subvalue(feature, value_string, subfeature, subvalue):
-
+ assert isinstance(feature, basestring)
+ assert isinstance(value_string, basestring)
+ assert isinstance(subfeature, basestring)
+ assert isinstance(subvalue, basestring)
if not value_string:
value_string = ''
-
- if not __subfeature_from_value.has_key(feature):
+ try:
+ return __subfeature_from_value[feature][value_string][subvalue] == subfeature
+ except KeyError:
return False
-
- if not __subfeature_from_value[feature].has_key(value_string):
- return False
-
- if not __subfeature_from_value[feature][value_string].has_key(subvalue):
- return False
-
- if __subfeature_from_value[feature][value_string][subvalue]\
- != subfeature:
- return False
-
- return True
-
-def implied_subfeature (feature, subvalue, value_string):
- result = __find_implied_subfeature (feature, subvalue, value_string)
- if not result:
- raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
-
- return result
# Uses Property
@@ -684,43 +707,50 @@ def expand (properties):
two values of a given non-free feature are directly expressed in the
input, an error is issued.
"""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
expanded = expand_subfeatures(properties)
return expand_composites (expanded)
-
+
# Accepts list of Property objects
def add_defaults (properties):
""" Given a set of properties, add default values for features not
- represented in the set.
+ represented in the set.
Note: if there's there's ordinary feature F1 and composite feature
F2, which includes some value for F1, and both feature have default values,
then the default value of F1 will be added, not the value in F2. This might
not be right idea: consider
-
+
feature variant : debug ... ;
debug : .... on
feature : off on ;
-
+
Here, when adding default for an empty property set, we'll get
-
+
debug off
-
- and that's kind of strange.
+
+ and that's kind of strange.
"""
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+
result = [x for x in properties]
-
+
handled_features = set()
for p in properties:
# We don't add default for conditional properties. We don't want
# debug:DEBUG to be takes as specified value for
if not p.condition():
handled_features.add(p.feature())
-
+
missing_top = [f for f in __all_top_features if not f in handled_features]
more = defaults(missing_top)
result.extend(more)
for p in more:
handled_features.add(p.feature())
-
+
# Add defaults for subfeatures of features which are present
for p in result[:]:
s = p.feature().subfeatures()
@@ -728,7 +758,7 @@ def add_defaults (properties):
for p in more:
handled_features.add(p.feature())
result.extend(more)
-
+
return result
def minimize (properties):
@@ -739,29 +769,31 @@ def minimize (properties):
Implicit properties will be expressed without feature
grist, and sub-property values will be expressed as elements joined
to the corresponding main property.
- """
-
+ """
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
# remove properties implied by composite features
components = []
for property in properties:
if __composite_properties.has_key (property):
components.extend(__composite_properties[property])
properties = b2.util.set.difference (properties, components)
-
+
# handle subfeatures and implicit features
# move subfeatures to the end of the list
properties = [p for p in properties if not p.feature().subfeature()] +\
[p for p in properties if p.feature().subfeature()]
-
+
result = []
while properties:
p = properties[0]
f = p.feature()
-
+
# locate all subproperties of $(x[1]) in the property set
subproperties = __select_subproperties (p, properties)
-
+
if subproperties:
# reconstitute the joined property name
subproperties.sort ()
@@ -774,7 +806,7 @@ def minimize (properties):
# eliminate properties whose value is equal to feature's
# default and which are not symmetric and which do not
# contradict values implied by composite properties.
-
+
# since all component properties of composites in the set
# have been eliminated, any remaining property whose
# feature is the same as a component of a composite in the
@@ -784,7 +816,7 @@ def minimize (properties):
#\
#or get_grist (fullp) in get_grist (components):
# FIXME: restore above
-
+
properties = properties[1:]
@@ -802,17 +834,17 @@ def split (properties):
substitution of backslashes for slashes, since Jam, unbidden,
sometimes swaps slash direction on NT.
"""
-
+ assert isinstance(properties, basestring)
def split_one (properties):
pieces = re.split (__re_slash_or_backslash, properties)
result = []
-
+
for x in pieces:
if not get_grist (x) and len (result) > 0 and get_grist (result [-1]):
result = result [0:-1] + [ result [-1] + '/' + x ]
else:
result.append (x)
-
+
return result
if isinstance (properties, str):
@@ -822,32 +854,34 @@ def split (properties):
for p in properties:
result += split_one (p)
return result
-
+
def compress_subproperties (properties):
""" Combine all subproperties into their parent properties
Requires: for every subproperty, there is a parent property. All
features are explicitly expressed.
-
+
This rule probably shouldn't be needed, but
build-request.expand-no-defaults is being abused for unintended
purposes and it needs help
"""
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
result = []
matched_subs = set()
all_subs = set()
for p in properties:
f = p.feature()
-
+
if not f.subfeature():
subs = __select_subproperties (p, properties)
if subs:
-
+
matched_subs.update(subs)
subvalues = '-'.join (sub.value() for sub in subs)
- result.append(b2.build.property.Property(
+ result.append(Property(
p.feature(), p.value() + '-' + subvalues,
p.condition()))
else:
@@ -865,10 +899,16 @@ def compress_subproperties (properties):
# Private methods
def __select_subproperties (parent_property, properties):
+ if __debug__:
+ from .property import Property
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(parent_property, Property)
return [ x for x in properties if __is_subproperty_of (parent_property, x) ]
def __get_subfeature_name (subfeature, value_string):
- if value_string == None:
+ assert isinstance(subfeature, basestring)
+ assert isinstance(value_string, basestring) or value_string is None
+ if value_string == None:
prefix = ''
else:
prefix = value_string + ':'
@@ -877,10 +917,12 @@ def __get_subfeature_name (subfeature, value_string):
def __validate_feature_attributes (name, attributes):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(attributes, basestring)
for attribute in attributes:
if not attribute in __all_attributes:
raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name))
-
+
if name in __all_features:
raise AlreadyDefined ("feature '%s' already defined" % name)
elif 'implicit' in attributes and 'free' in attributes:
@@ -888,10 +930,11 @@ def __validate_feature_attributes (name, attributes):
elif 'free' in attributes and 'propagated' in attributes:
raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name)
-
+
def __validate_feature (feature):
""" Generates an error if the feature is unknown.
"""
+ assert isinstance(feature, basestring)
if not __all_features.has_key (feature):
raise BaseException ('unknown feature "%s"' % feature)
@@ -902,6 +945,10 @@ def __select_subfeatures (parent_property, features):
subfeatures of the property's feature which are conditional on the
property's value.
"""
+ if __debug__:
+ from .property import Property
+ assert isinstance(parent_property, Property)
+ assert is_iterable_typed(features, Feature)
return [f for f in features if is_subfeature_of (parent_property, f)]
-
+
# FIXME: copy over tests.
diff --git a/src/build/generators.py b/src/build/generators.py
index dd195a840..f1c514556 100644
--- a/src/build/generators.py
+++ b/src/build/generators.py
@@ -18,13 +18,13 @@
# It starts by selecting 'viable generators', which have any chances of producing
# the desired target type with the required properties. Generators are ranked and
# a set of most specific ones is selected.
-#
+#
# The most specific generators have their 'run' methods called, with the properties
# and list of sources. Each one selects target which can be directly consumed, and
# tries to convert the remaining ones to the types it can consume. This is done
# by recursively calling 'construct' with all consumable types.
#
-# If the generator has collected all the targets it needs, it creates targets
+# If the generator has collected all the targets it needs, it creates targets
# corresponding to result, and returns it. When all generators have been run,
# results of one of them are selected and returned as result.
#
@@ -35,7 +35,7 @@
# Likewise, when generator tries to convert sources to consumable types, it can get
# more targets that it was asked for. The question is what to do with extra targets.
# Boost.Build attempts to convert them to requested types, and attempts as early as
-# possible. Specifically, this is done after invoking each generator. (Later I'll
+# possible. Specifically, this is done after invoking each generator. (Later I'll
# document the rationale for trying extra target conversion at that point).
#
# That early conversion is not always desirable. Suppose a generator got a source of
@@ -52,10 +52,10 @@ import cStringIO
import os.path
from virtual_target import Subvariant
-import virtual_target, type, property_set, property
+from . import virtual_target, type, property_set, property
from b2.util.logger import *
from b2.util.utility import *
-from b2.util import set
+from b2.util import set as set_, is_iterable_typed, is_iterable
from b2.util.sequence import unique
import b2.util.sequence as sequence
from b2.manager import get_manager
@@ -73,8 +73,8 @@ def reset ():
__type_to_generators = {}
__generators_for_toolset = {}
__overrides = {}
-
- # TODO: can these be global?
+
+ # TODO: can these be global?
__construct_stack = []
__viable_generators_cache = {}
__viable_source_types_cache = {}
@@ -95,7 +95,7 @@ __indent = ""
def debug():
global __debug
if __debug is None:
- __debug = "--debug-generators" in bjam.variable("ARGV")
+ __debug = "--debug-generators" in bjam.variable("ARGV")
return __debug
def increase_indent():
@@ -114,7 +114,7 @@ def decrease_indent():
# same generator. Does nothing if a non-derived target type is passed to it.
#
def update_cached_information_with_a_new_type(type):
-
+ assert isinstance(type, basestring)
base_type = b2.build.type.base(type)
if base_type:
@@ -153,7 +153,7 @@ def invalidate_extendable_viable_source_target_type_cache():
__vst_cached_types.append(t)
else:
del __viable_source_types_cache[t]
-
+
def dout(message):
if debug():
print __indent + message
@@ -162,7 +162,7 @@ class Generator:
""" Creates a generator.
manager: the build manager.
id: identifies the generator
-
+
rule: the rule which sets up build actions.
composing: whether generator processes each source target in
@@ -171,49 +171,52 @@ class Generator:
recusrive generators.construct_types call.
source_types (optional): types that this generator can handle
-
+
target_types_and_names: types the generator will create and, optionally, names for
created targets. Each element should have the form
type["(" name-pattern ")"]
for example, obj(%_x). Name of generated target will be found
by replacing % with the name of source, provided explicit name
was not specified.
-
+
requirements (optional)
-
+
NOTE: all subclasses must have a similar signature for clone to work!
"""
def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []):
- assert(not isinstance(source_types, str))
- assert(not isinstance(target_types_and_names, str))
+ assert isinstance(id, basestring)
+ assert isinstance(composing, bool)
+ assert is_iterable_typed(source_types, basestring)
+ assert is_iterable_typed(target_types_and_names, basestring)
+ assert is_iterable_typed(requirements, basestring)
self.id_ = id
self.composing_ = composing
self.source_types_ = source_types
self.target_types_and_names_ = target_types_and_names
self.requirements_ = requirements
-
+
self.target_types_ = []
self.name_prefix_ = []
self.name_postfix_ = []
-
+
for e in target_types_and_names:
# Create three parallel lists: one with the list of target types,
- # and two other with prefixes and postfixes to be added to target
+ # and two other with prefixes and postfixes to be added to target
# name. We use parallel lists for prefix and postfix (as opposed
# to mapping), because given target type might occur several times,
# for example "H H(%_symbols)".
m = _re_separate_types_prefix_and_postfix.match (e)
-
+
if not m:
raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id))
-
+
target_type = m.group (1)
if not target_type: target_type = ''
prefix = m.group (3)
if not prefix: prefix = ''
postfix = m.group (4)
if not postfix: postfix = ''
-
+
self.target_types_.append (target_type)
self.name_prefix_.append (prefix)
self.name_postfix_.append (postfix)
@@ -229,9 +232,11 @@ class Generator:
- id
- value to feature in properties
"""
- return self.__class__ (new_id,
- self.composing_,
- self.source_types_,
+ assert isinstance(new_id, basestring)
+ assert is_iterable_typed(new_toolset_properties, basestring)
+ return self.__class__ (new_id,
+ self.composing_,
+ self.source_types_,
self.target_types_and_names_,
# Note: this does not remove any subfeatures of
# which might cause problems
@@ -241,11 +246,13 @@ class Generator:
"""Creates another generator that is the same as $(self), except that
if 'base' is in target types of $(self), 'type' will in target types
of the new generator."""
+ assert isinstance(base, basestring)
+ assert isinstance(type, basestring)
target_types = []
for t in self.target_types_and_names_:
m = _re_match_type.match(t)
assert m
-
+
if m.group(1) == base:
if m.group(2):
target_types.append(type + m.group(2))
@@ -258,7 +265,7 @@ class Generator:
self.source_types_,
target_types,
self.requirements_)
-
+
def id(self):
return self.id_
@@ -271,28 +278,29 @@ class Generator:
def target_types (self):
""" Returns the list of target types that this generator produces.
It is assumed to be always the same -- i.e. it cannot change depending
- list of sources.
+ list of sources.
"""
return self.target_types_
def requirements (self):
""" Returns the required properties for this generator. Properties
- in returned set must be present in build properties if this
+ in returned set must be present in build properties if this
generator is to be used. If result has grist-only element,
that build properties must include some value of that feature.
"""
return self.requirements_
def match_rank (self, ps):
- """ Returns true if the generator can be run with the specified
+ """ Returns true if the generator can be run with the specified
properties.
"""
# See if generator's requirements are satisfied by
# 'properties'. Treat a feature name in requirements
# (i.e. grist-only element), as matching any value of the
# feature.
+ assert isinstance(ps, property_set.PropertySet)
all_requirements = self.requirements ()
-
+
property_requirements = []
feature_requirements = []
# This uses strings because genenator requirements allow
@@ -304,31 +312,38 @@ class Generator:
else:
feature_requirements.append (r)
-
+
return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \
and all(ps.get(get_grist(s)) for s in feature_requirements)
-
+
def run (self, project, name, prop_set, sources):
""" Tries to invoke this generator on the given sources. Returns a
list of generated targets (instances of 'virtual-target').
project: Project for which the targets are generated.
-
- name: Determines the name of 'name' attribute for
+
+ name: Determines the name of 'name' attribute for
all generated targets. See 'generated_targets' method.
-
+
prop_set: Desired properties for generated targets.
-
+
sources: Source targets.
"""
-
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ # intermediary targets don't have names, so None is possible
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
if project.manager ().logger ().on ():
project.manager ().logger ().log (__name__, " generator '%s'" % self.id_)
project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_)
-
+
if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1:
raise BaseException ("Unsupported source/source_type combination")
-
+
# We don't run composing generators if no name is specified. The reason
# is that composing generator combines several targets, which can have
# different names, and it cannot decide which name to give for produced
@@ -337,7 +352,7 @@ class Generator:
# This in effect, means that composing generators are runnable only
# at top-level of transofrmation graph, or if name is passed explicitly.
# Thus, we dissallow composing generators in the middle. For example, the
- # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed
+ # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed
# (the OBJ -> STATIC_LIB generator is composing)
if not self.composing_ or name:
return self.run_really (project, name, prop_set, sources)
@@ -345,15 +360,21 @@ class Generator:
return []
def run_really (self, project, name, prop_set, sources):
-
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ # intermediary targets don't have names, so None is possible
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
# consumed: Targets that this generator will consume directly.
# bypassed: Targets that can't be consumed and will be returned as-is.
-
+
if self.composing_:
(consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources)
else:
(consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources)
-
+
result = []
if consumed:
result = self.construct_result (consumed, project, name, prop_set)
@@ -369,17 +390,23 @@ class Generator:
return result
def construct_result (self, consumed, project, name, prop_set):
- """ Constructs the dependency graph that will be returned by this
+ """ Constructs the dependency graph that will be returned by this
generator.
consumed: Already prepared list of consumable targets
- If generator requires several source files will contain
+ If generator requires several source files will contain
exactly len $(self.source_types_) targets with matching types
- Otherwise, might contain several targets with the type of
+ Otherwise, might contain several targets with the type of
self.source_types_ [0]
project:
name:
prop_set: Properties to be used for all actions create here
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert is_iterable_typed(consumed, virtual_target.VirtualTarget)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(prop_set, property_set.PropertySet)
result = []
# If this is 1->1 transformation, apply it to all consumed targets in order.
if len (self.source_types_) < 2 and not self.composing_:
@@ -395,6 +422,7 @@ class Generator:
return result
def determine_target_name(self, fullname):
+ assert isinstance(fullname, basestring)
# Determine target name from fullname (maybe including path components)
# Place optional prefix and postfix around basename
@@ -415,7 +443,8 @@ class Generator:
def determine_output_name(self, sources):
"""Determine the name of the produced target from the
names of the sources."""
-
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
# The simple case if when a name
# of source has single dot. Then, we take the part before
# dot. Several dots can be caused by:
@@ -426,47 +455,53 @@ class Generator:
# dot. In the second case -- no sure, but for now take
# the part till the last dot too.
name = os.path.splitext(sources[0].name())[0]
-
+
for s in sources[1:]:
n2 = os.path.splitext(s.name())
if n2 != name:
get_manager().errors()(
"%s: source targets have different names: cannot determine target name"
% (self.id_))
-
+
# Names of sources might include directory. We should strip it.
return self.determine_target_name(sources[0].name())
-
-
+
+
def generated_targets (self, sources, prop_set, project, name):
""" Constructs targets that are created after consuming 'sources'.
The result will be the list of virtual-target, which the same length
as 'target_types' attribute and with corresponding types.
-
- When 'name' is empty, all source targets must have the same value of
+
+ When 'name' is empty, all source targets must have the same value of
the 'name' attribute, which will be used instead of the 'name' argument.
-
+
The value of 'name' attribute for each generated target will be equal to
the 'name' parameter if there's no name pattern for this type. Otherwise,
- the '%' symbol in the name pattern will be replaced with the 'name' parameter
+ the '%' symbol in the name pattern will be replaced with the 'name' parameter
to obtain the 'name' attribute.
-
+
For example, if targets types are T1 and T2(with name pattern "%_x"), suffixes
for T1 and T2 are .t1 and t2, and source if foo.z, then created files would
be "foo.t1" and "foo_x.t2". The 'name' attribute actually determined the
basename of a file.
-
+
Note that this pattern mechanism has nothing to do with implicit patterns
- in make. It's a way to produce target which name is different for name of
+ in make. It's a way to produce target which name is different for name of
source.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
if not name:
name = self.determine_output_name(sources)
-
+
# Assign an action for each target
action = self.action_class()
a = action(project.manager(), sources, self.id_, prop_set)
-
+
# Create generated target for each target type.
targets = []
pre = self.name_prefix_
@@ -477,9 +512,9 @@ class Generator:
generated_name = os.path.join(os.path.dirname(name), generated_name)
pre = pre[1:]
post = post[1:]
-
+
targets.append(virtual_target.FileTarget(generated_name, t, project, a))
-
+
return [ project.manager().virtual_targets().register(t) for t in targets ]
def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
@@ -489,17 +524,24 @@ class Generator:
only_one: convert 'source' to only one of source types
if there's more that one possibility, report an
error.
-
+
Returns a pair:
- consumed: all targets that can be consumed.
+ consumed: all targets that can be consumed.
bypassed: all targets that cannot be consumed.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(only_one, bool)
consumed = []
bypassed = []
- missing_types = []
+ missing_types = []
if len (sources) > 1:
- # Don't know how to handle several sources yet. Just try
+ # Don't know how to handle several sources yet. Just try
# to pass the request to other generator
missing_types = self.source_types_
@@ -507,26 +549,26 @@ class Generator:
(c, m) = self.consume_directly (sources [0])
consumed += c
missing_types += m
-
+
# No need to search for transformation if
# some source type has consumed source and
# no more source types are needed.
if only_one and consumed:
missing_types = []
-
+
#TODO: we should check that only one source type
#if create of 'only_one' is true.
# TODO: consider if consuned/bypassed separation should
# be done by 'construct_types'.
-
+
if missing_types:
transformed = construct_types (project, name, missing_types, prop_set, sources)
-
+
# Add targets of right type to 'consumed'. Add others to
# 'bypassed'. The 'generators.construct' rule has done
# its best to convert everything to the required type.
# There's no need to rerun it on targets of different types.
-
+
# NOTE: ignoring usage requirements
for t in transformed[1]:
if t.type() in missing_types:
@@ -534,36 +576,45 @@ class Generator:
else:
bypassed.append(t)
-
+
consumed = unique(consumed)
bypassed = unique(bypassed)
-
+
# remove elements of 'bypassed' that are in 'consumed'
-
- # Suppose the target type of current generator, X is produced from
+
+ # Suppose the target type of current generator, X is produced from
# X_1 and X_2, which are produced from Y by one generator.
# When creating X_1 from Y, X_2 will be added to 'bypassed'
# Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed'
# But they are also in 'consumed'. We have to remove them from
# bypassed, so that generators up the call stack don't try to convert
- # them.
+ # them.
# In this particular case, X_1 instance in 'consumed' and X_1 instance
# in 'bypassed' will be the same: because they have the same source and
# action name, and 'virtual-target.register' won't allow two different
# instances. Therefore, it's OK to use 'set.difference'.
-
+
bypassed = set.difference(bypassed, consumed)
return (consumed, bypassed)
-
+
def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources):
""" Converts several files to consumable types.
- """
+ """
consumed = []
bypassed = []
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
# We process each source one-by-one, trying to convert it to
# a usable type.
for s in sources:
@@ -578,12 +629,13 @@ class Generator:
return (consumed, bypassed)
def consume_directly (self, source):
+ assert isinstance(source, virtual_target.VirtualTarget)
real_source_type = source.type ()
# If there are no source types, we can consume anything
source_types = self.source_types()
if not source_types:
- source_types = [real_source_type]
+ source_types = [real_source_type]
consumed = []
missing_types = []
@@ -596,9 +648,9 @@ class Generator:
missing_types.append (st)
return (consumed, missing_types)
-
+
def action_class (self):
- """ Returns the class to be used to actions. Default implementation
+ """ Returns the class to be used to actions. Default implementation
returns "action".
"""
return virtual_target.Action
@@ -607,11 +659,13 @@ class Generator:
def find (id):
""" Finds the generator with id. Returns None if not found.
"""
+ assert isinstance(id, basestring)
return __generators.get (id, None)
def register (g):
""" Registers new generator instance 'g'.
"""
+ assert isinstance(g, Generator)
id = g.id()
__generators [id] = g
@@ -660,6 +714,19 @@ def register (g):
invalidate_extendable_viable_source_target_type_cache()
+def check_register_types(fn):
+ def wrapper(id, source_types, target_types, requirements=[]):
+ assert isinstance(id, basestring)
+ assert is_iterable_typed(source_types, basestring)
+ assert is_iterable_typed(target_types, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ return fn(id, source_types, target_types, requirements=requirements)
+ wrapper.__name__ = fn.__name__
+ wrapper.__doc__ = fn.__doc__
+ return wrapper
+
+
+@check_register_types
def register_standard (id, source_types, target_types, requirements = []):
""" Creates new instance of the 'generator' class and registers it.
Returns the creates instance.
@@ -671,6 +738,8 @@ def register_standard (id, source_types, target_types, requirements = []):
register (g)
return g
+
+@check_register_types
def register_composing (id, source_types, target_types, requirements = []):
g = Generator (id, True, source_types, target_types, requirements)
register (g)
@@ -679,6 +748,7 @@ def register_composing (id, source_types, target_types, requirements = []):
def generators_for_toolset (toolset):
""" Returns all generators which belong to 'toolset'.
"""
+ assert isinstance(toolset, basestring)
return __generators_for_toolset.get(toolset, [])
def override (overrider_id, overridee_id):
@@ -687,26 +757,29 @@ def override (overrider_id, overridee_id):
that could produce a target of certain type,
both those generators are amoung viable generators,
the overridden generator is immediately discarded.
-
+
The overridden generators are discarded immediately
after computing the list of viable generators, before
running any of them."""
-
+ assert isinstance(overrider_id, basestring)
+ assert isinstance(overridee_id, basestring)
+
__overrides.setdefault(overrider_id, []).append(overridee_id)
def __viable_source_types_real (target_type):
""" Returns a list of source type which can possibly be converted
to 'target_type' by some chain of generator invocation.
-
+
More formally, takes all generators for 'target_type' and
returns union of source types for those generators and result
of calling itself recusrively on source types.
"""
+ assert isinstance(target_type, basestring)
generators = []
# 't0' is the initial list of target types we need to process to get a list
# of their viable source target types. New target types will not be added to
- # this list.
+ # this list.
t0 = type.all_bases (target_type)
@@ -714,14 +787,14 @@ def __viable_source_types_real (target_type):
# list of their viable source target types. This list will get expanded as
# we locate more target types to process.
t = t0
-
+
result = []
while t:
- # Find all generators for current type.
+ # Find all generators for current type.
# Unlike 'find_viable_generators' we don't care about prop_set.
generators = __type_to_generators.get (t [0], [])
t = t[1:]
-
+
for g in generators:
if not g.source_types():
# Empty source types -- everything can be accepted
@@ -729,7 +802,7 @@ def __viable_source_types_real (target_type):
# This will terminate outer loop.
t = None
break
-
+
for source_type in g.source_types ():
if not source_type in result:
# If generator accepts 'source_type' it
@@ -750,13 +823,14 @@ def __viable_source_types_real (target_type):
if not n in t0:
t.append (n)
result.append (n)
-
+
return result
def viable_source_types (target_type):
""" Helper rule, caches the result of '__viable_source_types_real'.
"""
+ assert isinstance(target_type, basestring)
if not __viable_source_types_cache.has_key(target_type):
__vst_cached_types.append(target_type)
__viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
@@ -767,6 +841,7 @@ def viable_source_types_for_generator_real (generator):
method of 'generator', has some change of being eventually used
(probably after conversion by other generators)
"""
+ assert isinstance(generator, Generator)
source_types = generator.source_types ()
if not source_types:
@@ -791,15 +866,24 @@ def viable_source_types_for_generator_real (generator):
def viable_source_types_for_generator (generator):
""" Caches the result of 'viable_source_types_for_generator'.
"""
+ assert isinstance(generator, Generator)
if not __viable_source_types_cache.has_key(generator):
__vstg_cached_generators.append(generator)
__viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator)
-
+
return __viable_source_types_cache[generator]
def try_one_generator_really (project, name, generator, target_type, properties, sources):
""" Returns usage requirements + list of created targets.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(generator, Generator)
+ assert isinstance(target_type, basestring)
+ assert isinstance(properties, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
targets = generator.run (project, name, properties, sources)
usage_requirements = []
@@ -809,7 +893,7 @@ def try_one_generator_really (project, name, generator, target_type, properties,
if targets:
success = True;
-
+
if isinstance (targets[0], property_set.PropertySet):
usage_requirements = targets [0]
targets = targets [1]
@@ -818,7 +902,7 @@ def try_one_generator_really (project, name, generator, target_type, properties,
usage_requirements = property_set.empty ()
dout( " generator" + generator.id() + " spawned ")
- # generators.dout [ indent ] " " $(targets) ;
+ # generators.dout [ indent ] " " $(targets) ;
# if $(usage-requirements)
# {
# generators.dout [ indent ] " with usage requirements:" $(x) ;
@@ -834,21 +918,29 @@ def try_one_generator (project, name, generator, target_type, properties, source
to fail. If so, quickly returns empty list. Otherwise, calls
try_one_generator_really.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(generator, Generator)
+ assert isinstance(target_type, basestring)
+ assert isinstance(properties, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
source_types = []
for s in sources:
source_types.append (s.type ())
viable_source_types = viable_source_types_for_generator (generator)
-
+
if source_types and viable_source_types != ['*'] and\
- not set.intersection (source_types, viable_source_types):
+ not set_.intersection (source_types, viable_source_types):
if project.manager ().logger ().on ():
- id = generator.id ()
+ id = generator.id ()
project.manager ().logger ().log (__name__, "generator '%s' pruned" % id)
project.manager ().logger ().log (__name__, "source_types" '%s' % source_types)
project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types)
-
+
return []
else:
@@ -856,10 +948,18 @@ def try_one_generator (project, name, generator, target_type, properties, source
def construct_types (project, name, target_types, prop_set, sources):
-
+
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert is_iterable_typed(target_types, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+
result = []
usage_requirements = property_set.empty()
-
+
for t in target_types:
r = construct (project, name, t, prop_set, sources)
@@ -870,7 +970,7 @@ def construct_types (project, name, target_types, prop_set, sources):
# TODO: have to introduce parameter controlling if
# several types can be matched and add appropriate
- # checks
+ # checks
# TODO: need to review the documentation for
# 'construct' to see if it should return $(source) even
@@ -883,9 +983,10 @@ def construct_types (project, name, target_types, prop_set, sources):
return (usage_requirements, sources)
def __ensure_type (targets):
- """ Ensures all 'targets' have types. If this is not so, exists with
+ """ Ensures all 'targets' have types. If this is not so, exists with
error.
"""
+ assert is_iterable_typed(targets, virtual_target.VirtualTarget)
for t in targets:
if not t.type ():
get_manager().errors()("target '%s' has no type" % str (t))
@@ -898,24 +999,26 @@ def find_viable_generators_aux (target_type, prop_set):
- for each type find all generators that generate that type and which requirements
are satisfied by properties.
- if the set of generators is not empty, returns that set.
-
+
Note: this algorithm explicitly ignores generators for base classes if there's
at least one generator for requested target_type.
"""
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
# Select generators that can create the required target type.
viable_generators = []
initial_generators = []
- import type
+ from . import type
# Try all-type generators first. Assume they have
# quite specific requirements.
all_bases = type.all_bases(target_type)
-
+
for t in all_bases:
-
+
initial_generators = __type_to_generators.get(t, [])
-
+
if initial_generators:
dout("there are generators for this type")
if t != target_type:
@@ -933,22 +1036,24 @@ def find_viable_generators_aux (target_type, prop_set):
ng = g.clone_and_change_target_type(t, target_type)
generators2.append(ng)
register(ng)
-
+
initial_generators = generators2
break
-
+
for g in initial_generators:
dout("trying generator " + g.id()
+ "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")
-
+
m = g.match_rank(prop_set)
if m:
dout(" is viable")
- viable_generators.append(g)
-
+ viable_generators.append(g)
+
return viable_generators
def find_viable_generators (target_type, prop_set):
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
key = target_type + '.' + str (prop_set)
l = __viable_generators_cache.get (key, None)
@@ -971,15 +1076,15 @@ def find_viable_generators (target_type, prop_set):
# Generators which override 'all'.
all_overrides = []
-
+
# Generators which are overriden
- overriden_ids = []
+ overriden_ids = []
for g in viable_generators:
id = g.id ()
-
+
this_overrides = __overrides.get (id, [])
-
+
if this_overrides:
overriden_ids.extend (this_overrides)
if 'all' in this_overrides:
@@ -989,24 +1094,31 @@ def find_viable_generators (target_type, prop_set):
viable_generators = all_overrides
return [g for g in viable_generators if not g.id() in overriden_ids]
-
+
def __construct_really (project, name, target_type, prop_set, sources):
""" Attempts to construct target by finding viable generators, running them
and selecting the dependency graph.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
viable_generators = find_viable_generators (target_type, prop_set)
-
+
result = []
dout(" *** %d viable generators" % len (viable_generators))
generators_that_succeeded = []
-
+
for g in viable_generators:
- __active_generators.append(g)
+ __active_generators.append(g)
r = try_one_generator (project, name, g, target_type, prop_set, sources)
del __active_generators[-1]
-
+
if r:
generators_that_succeeded.append(g)
if result:
@@ -1027,7 +1139,7 @@ def __construct_really (project, name, target_type, prop_set, sources):
get_manager().errors()(output.getvalue())
else:
result = r;
-
+
return result;
@@ -1036,19 +1148,26 @@ def construct (project, name, target_type, prop_set, sources, top_level=False):
from 'sources'. The 'sources' are treated as a collection of
*possible* ingridients -- i.e. it is not required to consume
them all. If 'multiple' is true, the rule is allowed to return
- several targets of 'target-type'.
-
+ several targets of 'target-type'.
+
Returns a list of target. When this invocation is first instance of
'construct' in stack, returns only targets of requested 'target-type',
otherwise, returns also unused sources and additionally generated
targets.
-
+
If 'top-level' is set, does not suppress generators that are already
used in the stack. This may be useful in cases where a generator
has to build a metatarget -- for example a target corresponding to
- built tool.
+ built tool.
"""
-
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(name, basestring) or name is None
+ assert isinstance(target_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(top_level, bool)
global __active_generators
if top_level:
saved_active = __active_generators
@@ -1057,23 +1176,23 @@ def construct (project, name, target_type, prop_set, sources, top_level=False):
global __construct_stack
if not __construct_stack:
__ensure_type (sources)
-
+
__construct_stack.append (1)
increase_indent ()
if project.manager().logger().on():
dout( "*** construct " + target_type)
-
+
for s in sources:
dout(" from " + str(s))
project.manager().logger().log (__name__, " properties: ", prop_set.raw ())
-
+
result = __construct_really(project, name, target_type, prop_set, sources)
decrease_indent()
-
+
__construct_stack = __construct_stack [1:]
if top_level:
@@ -1086,7 +1205,7 @@ def add_usage_requirements (result, raw_properties):
if isinstance (result[0], property_set.PropertySet):
return (result[0].add_raw(raw_properties), result[1])
else:
- return (propery_set.create(raw-properties), result)
+ return (property_set.create(raw_properties), result)
#if [ class.is-a $(result[1]) : property-set ]
#{
# return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ;
diff --git a/src/build/project.jam b/src/build/project.jam
index 83d0377e4..2b3386a13 100644
--- a/src/build/project.jam
+++ b/src/build/project.jam
@@ -96,7 +96,7 @@ rule load-used-projects ( module-name )
# 'jamroot'. With the latter, we would get duplicate matches on Windows and
# would have to eliminate duplicates.
JAMROOT ?= [ modules.peek : JAMROOT ] ;
-JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ;
+JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot. [Jj]amroot.jam ;
# Loads parent of Jamfile at 'location'. Issues an error if nothing is found.
@@ -191,7 +191,7 @@ rule module-name ( jamfile-location )
# Default patterns to search for the Jamfiles to use for build declarations.
#
JAMFILE = [ modules.peek : JAMFILE ] ;
-JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ;
+JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile. [Jj]amfile.jam ;
# Find the Jamfile at the given location. This returns the exact names of all
@@ -1001,6 +1001,32 @@ rule glob-internal ( project : wildcards + : excludes * : rule-name )
}
+rule glob-path-root ( root path )
+{
+ return [ path.root $(path) $(root) ] ;
+}
+
+rule glob-internal-ex ( project : paths + : wildcards + : excludes * : rule-name )
+{
+ # Make the paths we search in absolute, if they aren't already absolute.
+ # If the given paths are relative, they will be relative to the source
+ # directory. So that's what we root against.
+ local source-location
+ = [ path.root [ $(project).get source-location ] [ path.pwd ] ] ;
+ local search-paths
+ = [ sequence.transform project.glob-path-root $(source-location) : $(paths) ] ;
+ paths
+ = [ path.$(rule-name) $(search-paths) : $(wildcards) : $(excludes) ] ;
+ # The paths we have found are absolute, but the names specified in the
+ # sources list are assumed to be relative to the source directory of the
+ # corresponding project. Make the results relative to the source again.
+ local result
+ = [ sequence.transform path.relative-to $(source-location) : $(paths) ] ;
+
+ return $(result) ;
+}
+
+
# This module defines rules common to all projects.
#
module project-rules
@@ -1211,6 +1237,20 @@ module project-rules
$(excludes) : glob-tree ] ;
}
+ rule glob-ex ( paths + : wildcards + : excludes * )
+ {
+ import project ;
+ return [ project.glob-internal-ex [ project.current ]
+ : $(paths) : $(wildcards) : $(excludes) : glob ] ;
+ }
+
+ rule glob-tree-ex ( paths + : wildcards + : excludes * )
+ {
+ import project ;
+ return [ project.glob-internal-ex [ project.current ]
+ : $(paths) : $(wildcards) : $(excludes) : glob-tree ] ;
+ }
+
# Calculates conditional requirements for multiple requirements at once.
# This is a shorthand to reduce duplication and to keep an inline
# declarative syntax. For example:
diff --git a/src/build/project.py b/src/build/project.py
index 71bc33fb3..ea8fe0106 100644
--- a/src/build/project.py
+++ b/src/build/project.py
@@ -40,9 +40,10 @@
# their project id.
import b2.util.path
+import b2.build.targets
from b2.build import property_set, property
from b2.build.errors import ExceptionWithUserContext
-import b2.build.targets
+from b2.manager import get_manager
import bjam
import b2
@@ -56,7 +57,10 @@ import imp
import traceback
import b2.util.option as option
-from b2.util import record_jam_to_value_mapping, qualify_jam_action
+from b2.util import (
+ record_jam_to_value_mapping, qualify_jam_action, is_iterable_typed, bjam_signature,
+ is_iterable)
+
class ProjectRegistry:
@@ -130,6 +134,7 @@ class ProjectRegistry:
file and jamfile needed by the loaded one will be loaded recursively.
If the jamfile at that location is loaded already, does nothing.
Returns the project module for the Jamfile."""
+ assert isinstance(jamfile_location, basestring)
absolute = os.path.join(os.getcwd(), jamfile_location)
absolute = os.path.normpath(absolute)
@@ -159,6 +164,7 @@ class ProjectRegistry:
return mname
def load_used_projects(self, module_name):
+ assert isinstance(module_name, basestring)
# local used = [ modules.peek $(module-name) : .used-projects ] ;
used = self.used_projects[module_name]
@@ -172,7 +178,7 @@ class ProjectRegistry:
def load_parent(self, location):
"""Loads parent of Jamfile at 'location'.
Issues an error if nothing is found."""
-
+ assert isinstance(location, basestring)
found = b2.util.path.glob_in_parents(
location, self.JAMROOT + self.JAMFILE)
@@ -187,6 +193,8 @@ class ProjectRegistry:
"""Given 'name' which can be project-id or plain directory name,
return project module corresponding to that id or directory.
Returns nothing of project is not found."""
+ assert isinstance(name, basestring)
+ assert isinstance(current_location, basestring)
project_module = None
@@ -214,6 +222,7 @@ class ProjectRegistry:
"""Returns the name of module corresponding to 'jamfile-location'.
If no module corresponds to location yet, associates default
module name with that location."""
+ assert isinstance(jamfile_location, basestring)
module = self.location2module.get(jamfile_location)
if not module:
# Root the path, so that locations are always umbiguious.
@@ -230,6 +239,9 @@ class ProjectRegistry:
exact names of all the Jamfiles in the given directory. The optional
parent-root argument causes this to search not the given directory
but the ones above it up to the directory given in it."""
+ assert isinstance(dir, basestring)
+ assert isinstance(parent_root, (int, bool))
+ assert isinstance(no_errors, (int, bool))
# Glob for all the possible Jamfiles according to the match pattern.
#
@@ -280,6 +292,8 @@ Please consult the documentation at 'http://boost.org/boost-build2'."""
"""Load a Jamfile at the given directory. Returns nothing.
Will attempt to load the file as indicated by the JAMFILE patterns.
Effect of calling this rule twice with the same 'dir' is underfined."""
+ assert isinstance(dir, basestring)
+ assert isinstance(jamfile_module, basestring)
# See if the Jamfile is where it should be.
is_jamroot = False
@@ -359,12 +373,15 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
The caller is required to never call this method twice on
the same file.
"""
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(file, basestring)
self.used_projects[jamfile_module] = []
bjam.call("load", jamfile_module, file)
self.load_used_projects(jamfile_module)
def is_jamroot(self, basename):
+ assert isinstance(basename, basestring)
match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
if match:
return 1
@@ -378,7 +395,9 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
location is the location (directory) of the project to initialize.
If not specified, standalone project will be initialized
"""
-
+ assert isinstance(module_name, basestring)
+ assert isinstance(location, basestring) or location is None
+ assert isinstance(basename, basestring) or basename is None
if "--debug-loading" in self.manager.argv():
print "Initializing project '%s'" % module_name
@@ -465,6 +484,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def inherit_attributes(self, project_module, parent_module):
"""Make 'project-module' inherit attributes of project
root and parent module."""
+ assert isinstance(project_module, basestring)
+ assert isinstance(parent_module, basestring)
attributes = self.module2attributes[project_module]
pattributes = self.module2attributes[parent_module]
@@ -502,6 +523,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def register_id(self, id, module):
"""Associate the given id with the given project module."""
+ assert isinstance(id, basestring)
+ assert isinstance(module, basestring)
self.id2module[id] = module
def current(self):
@@ -509,11 +532,17 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
return self.current_project
def set_current(self, c):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(c, ProjectTarget)
self.current_project = c
def push_current(self, project):
"""Temporary changes the current project to 'project'. Should
be followed by 'pop-current'."""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
self.saved_current_project.append(self.current_project)
self.current_project = project
@@ -524,11 +553,14 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def attributes(self, project):
"""Returns the project-attribute instance for the
specified jamfile module."""
+ assert isinstance(project, basestring)
return self.module2attributes[project]
def attribute(self, project, attribute):
"""Returns the value of the specified attribute in the
specified jamfile module."""
+ assert isinstance(project, basestring)
+ assert isinstance(attribute, basestring)
try:
return self.module2attributes[project].get(attribute)
except:
@@ -537,10 +569,14 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def attributeDefault(self, project, attribute, default):
"""Returns the value of the specified attribute in the
specified jamfile module."""
+ assert isinstance(project, basestring)
+ assert isinstance(attribute, basestring)
+ assert isinstance(default, basestring) or default is None
return self.module2attributes[project].getDefault(attribute, default)
def target(self, project_module):
"""Returns the project target corresponding to the 'project-module'."""
+ assert isinstance(project_module, basestring)
if not self.module2target.has_key(project_module):
self.module2target[project_module] = \
b2.build.targets.ProjectTarget(project_module, project_module,
@@ -550,6 +586,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
def use(self, id, location):
# Use/load a project.
+ assert isinstance(id, basestring)
+ assert isinstance(location, basestring)
saved_project = self.current_project
project_module = self.load(location)
declared_id = self.attributeDefault(project_module, "id", "")
@@ -564,16 +602,24 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
self.current_module = saved_project
- def add_rule(self, name, callable):
+ def add_rule(self, name, callable_):
"""Makes rule 'name' available to all subsequently loaded Jamfiles.
Calling that rule wil relay to 'callable'."""
- self.project_rules_.add_rule(name, callable)
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ self.project_rules_.add_rule(name, callable_)
def project_rules(self):
return self.project_rules_
def glob_internal(self, project, wildcards, excludes, rule_name):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(project, ProjectTarget)
+ assert is_iterable_typed(wildcards, basestring)
+ assert is_iterable_typed(excludes, basestring) or excludes is None
+ assert isinstance(rule_name, basestring)
location = project.get("source-location")[0]
result = []
@@ -656,6 +702,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project))
since then we might get naming conflicts between standard
Python modules and those.
"""
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(extra_path, basestring) or extra_path is None
# See if we loaded module of this name already
existing = self.loaded_tool_modules_.get(name)
if existing:
@@ -774,7 +822,20 @@ class ProjectAttributes:
def set(self, attribute, specification, exact=False):
"""Set the named attribute from the specification given by the user.
The value actually set may be different."""
-
+ assert isinstance(attribute, basestring)
+ assert isinstance(exact, (int, bool))
+ if __debug__ and not exact:
+ if attribute == 'requirements':
+ assert (isinstance(specification, property_set.PropertySet)
+ or all(isinstance(s, basestring) for s in specification))
+ elif attribute in (
+ 'usage-requirements', 'default-build', 'source-location', 'build-dir', 'id'):
+ assert is_iterable_typed(specification, basestring)
+ elif __debug__:
+ assert (
+ isinstance(specification, (property_set.PropertySet, type(None), basestring))
+ or all(isinstance(s, basestring) for s in specification)
+ )
if exact:
self.__dict__[attribute] = specification
@@ -838,9 +899,11 @@ for project at '%s'""" % (attribute, self.location))
self.__dict__[attribute] = specification
def get(self, attribute):
+ assert isinstance(attribute, basestring)
return self.__dict__[attribute]
def getDefault(self, attribute, default):
+ assert isinstance(attribute, basestring)
return self.__dict__.get(attribute, default)
def dump(self):
@@ -876,41 +939,51 @@ class ProjectRules:
"error_reporting_wrapper", "add_rule_for_type", "reverse"]]
self.all_names_ = [x for x in self.local_names]
- def _import_rule(self, bjam_module, name, callable):
- if hasattr(callable, "bjam_signature"):
- bjam.import_rule(bjam_module, name, self.make_wrapper(callable), callable.bjam_signature)
+ def _import_rule(self, bjam_module, name, callable_):
+ assert isinstance(bjam_module, basestring)
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ if hasattr(callable_, "bjam_signature"):
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable_), callable_.bjam_signature)
else:
- bjam.import_rule(bjam_module, name, self.make_wrapper(callable))
+ bjam.import_rule(bjam_module, name, self.make_wrapper(callable_))
def add_rule_for_type(self, type):
+ assert isinstance(type, basestring)
rule_name = type.lower().replace("_", "-")
- def xpto (name, sources = [], requirements = [], default_build = [], usage_requirements = []):
+ @bjam_signature([['name'], ['sources', '*'], ['requirements', '*'],
+ ['default_build', '*'], ['usage_requirements', '*']])
+ def xpto (name, sources=[], requirements=[], default_build=[], usage_requirements=[]):
+
return self.manager_.targets().create_typed_target(
- type, self.registry.current(), name[0], sources,
+ type, self.registry.current(), name, sources,
requirements, default_build, usage_requirements)
self.add_rule(rule_name, xpto)
- def add_rule(self, name, callable):
- self.rules[name] = callable
+ def add_rule(self, name, callable_):
+ assert isinstance(name, basestring)
+ assert callable(callable_)
+ self.rules[name] = callable_
self.all_names_.append(name)
# Add new rule at global bjam scope. This might not be ideal,
# added because if a jamroot does 'import foo' where foo calls
# add_rule, we need to import new rule to jamroot scope, and
# I'm lazy to do this now.
- self._import_rule("", name, callable)
+ self._import_rule("", name, callable_)
def all_names(self):
return self.all_names_
- def call_and_report_errors(self, callable, *args, **kw):
+ def call_and_report_errors(self, callable_, *args, **kw):
+ assert callable(callable_)
result = None
try:
self.manager_.errors().push_jamfile_context()
- result = callable(*args, **kw)
+ result = callable_(*args, **kw)
except ExceptionWithUserContext, e:
e.report()
except Exception, e:
@@ -923,16 +996,18 @@ class ProjectRules:
return result
- def make_wrapper(self, callable):
+ def make_wrapper(self, callable_):
"""Given a free-standing function 'callable', return a new
callable that will call 'callable' and report all exceptins,
using 'call_and_report_errors'."""
+ assert callable(callable_)
def wrapper(*args, **kw):
- return self.call_and_report_errors(callable, *args, **kw)
+ return self.call_and_report_errors(callable_, *args, **kw)
return wrapper
def init_project(self, project_module, python_standalone=False):
-
+ assert isinstance(project_module, basestring)
+ assert isinstance(python_standalone, bool)
if python_standalone:
m = sys.modules[project_module]
@@ -961,7 +1036,7 @@ class ProjectRules:
self._import_rule(project_module, n, self.rules[n])
def project(self, *args):
-
+ assert is_iterable(args) and all(is_iterable(arg) for arg in args)
jamfile_module = self.registry.current().project_module()
attributes = self.registry.attributes(jamfile_module)
@@ -1017,7 +1092,8 @@ attribute is allowed only for top-level 'project' invocations""")
"""Declare and set a project global constant.
Project global constants are normal variables but should
not be changed. They are applied to every child Jamfile."""
- m = "Jamfile"
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(value, basestring)
self.registry.current().add_constant(name[0], value)
def path_constant(self, name, value):
@@ -1025,6 +1101,8 @@ attribute is allowed only for top-level 'project' invocations""")
path is adjusted to be relative to the invocation directory. The given
value path is taken to be either absolute, or relative to this project
root."""
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(value, basestring)
if len(value) > 1:
self.registry.manager.error()("path constant should have one element")
self.registry.current().add_constant(name[0], value[0], path=1)
@@ -1032,27 +1110,35 @@ attribute is allowed only for top-level 'project' invocations""")
def use_project(self, id, where):
# See comment in 'load' for explanation why we record the
# parameters as opposed to loading the project now.
- m = self.registry.current().project_module();
+ assert is_iterable_typed(id, basestring)
+ assert is_iterable_typed(where, basestring)
+ m = self.registry.current().project_module()
self.registry.used_projects[m].append((id[0], where[0]))
def build_project(self, dir):
- assert(isinstance(dir, list))
+ assert is_iterable_typed(dir, basestring)
jamfile_module = self.registry.current().project_module()
attributes = self.registry.attributes(jamfile_module)
now = attributes.get("projects-to-build")
attributes.set("projects-to-build", now + dir, exact=True)
def explicit(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
self.registry.current().mark_targets_as_explicit(target_names)
def always(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
self.registry.current().mark_targets_as_alays(target_names)
def glob(self, wildcards, excludes=None):
+ assert is_iterable_typed(wildcards, basestring)
+ assert is_iterable_typed(excludes, basestring)or excludes is None
return self.registry.glob_internal(self.registry.current(),
wildcards, excludes, "glob")
def glob_tree(self, wildcards, excludes=None):
+ assert is_iterable_typed(wildcards, basestring)
+ assert is_iterable_typed(excludes, basestring) or excludes is None
bad = 0
for p in wildcards:
if os.path.dirname(p):
@@ -1076,6 +1162,7 @@ attribute is allowed only for top-level 'project' invocations""")
# will expect the module to be found even though
# the directory is not in BOOST_BUILD_PATH.
# So temporary change the search path.
+ assert is_iterable_typed(toolset, basestring)
current = self.registry.current()
location = current.get('location')
@@ -1090,7 +1177,9 @@ attribute is allowed only for top-level 'project' invocations""")
self.registry.set_current(current)
def import_(self, name, names_to_import=None, local_names=None):
-
+ assert is_iterable_typed(name, basestring)
+ assert is_iterable_typed(names_to_import, basestring) or names_to_import is None
+ assert is_iterable_typed(local_names, basestring)or local_names is None
name = name[0]
py_name = name
if py_name == "os":
@@ -1133,7 +1222,8 @@ attribute is allowed only for top-level 'project' invocations""")
lib x : x.cpp : [ conditional gcc debug :
DEBUG_EXCEPTION DEBUG_TRACE ] ;
"""
-
+ assert is_iterable_typed(condition, basestring)
+ assert is_iterable_typed(requirements, basestring)
c = string.join(condition, ",")
if c.find(":") != -1:
return [c + r for r in requirements]
@@ -1141,6 +1231,8 @@ attribute is allowed only for top-level 'project' invocations""")
return [c + ":" + r for r in requirements]
def option(self, name, value):
+ assert is_iterable(name) and isinstance(name[0], basestring)
+ assert is_iterable(value) and isinstance(value[0], basestring)
name = name[0]
if not name in ["site-config", "user-config", "project-config"]:
get_manager().errors()("The 'option' rule may be used only in site-config or user-config")
diff --git a/src/build/property.jam b/src/build/property.jam
index ff28dfd20..78a9744b1 100644
--- a/src/build/property.jam
+++ b/src/build/property.jam
@@ -237,12 +237,15 @@ rule as-path ( properties * )
local components ;
for local p in $(properties)
{
- if $(p:G)
+ if ! hidden in [ feature.attributes $(p:G) ]
{
- local f = [ utility.ungrist $(p:G) ] ;
- p = $(f)-$(p:G=) ;
+ if $(p:G)
+ {
+ local f = [ utility.ungrist $(p:G) ] ;
+ p = $(f)-$(p:G=) ;
+ }
+ components += [ $(.abbrev) $(p) ] ;
}
- components += [ $(.abbrev) $(p) ] ;
}
$(entry) = $(components:J=/) ;
diff --git a/src/build/property.py b/src/build/property.py
index f851c9e5e..11a18ff38 100644
--- a/src/build/property.py
+++ b/src/build/property.py
@@ -1,17 +1,17 @@
# Status: ported, except for tests.
# Base revision: 64070
#
-# Copyright 2001, 2002, 2003 Dave Abrahams
-# Copyright 2006 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2001, 2002, 2003 Dave Abrahams
+# Copyright 2006 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import re
import sys
from b2.util.utility import *
from b2.build import feature
-from b2.util import sequence, qualify_jam_action
+from b2.util import sequence, qualify_jam_action, is_iterable_typed
import b2.util.set
from b2.manager import get_manager
@@ -41,7 +41,7 @@ class Property(object):
self._feature = f
self._value = value
self._condition = condition
-
+
def feature(self):
return self._feature
@@ -70,7 +70,9 @@ class Property(object):
def create_from_string(s, allow_condition=False,allow_missing_value=False):
-
+ assert isinstance(s, basestring)
+ assert isinstance(allow_condition, bool)
+ assert isinstance(allow_missing_value, bool)
condition = []
import types
if not isinstance(s, types.StringType):
@@ -92,7 +94,7 @@ def create_from_string(s, allow_condition=False,allow_missing_value=False):
if feature.is_implicit_value(s):
f = feature.implied_feature(s)
value = s
- else:
+ else:
raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
else:
if feature.valid(feature_name):
@@ -119,11 +121,11 @@ def create_from_string(s, allow_condition=False,allow_missing_value=False):
if condition:
condition = [create_from_string(x) for x in condition.split(',')]
-
+
return Property(f, value, condition)
def create_from_strings(string_list, allow_condition=False):
-
+ assert is_iterable_typed(string_list, basestring)
return [create_from_string(s, allow_condition) for s in string_list]
def reset ():
@@ -153,7 +155,7 @@ def path_order (x, y):
"""
if x == y:
return 0
-
+
xg = get_grist (x)
yg = get_grist (y)
@@ -164,10 +166,10 @@ def path_order (x, y):
return 1
else:
- if not xg:
+ if not xg:
x = feature.expand_subfeatures([x])
y = feature.expand_subfeatures([y])
-
+
if x < y:
return -1
elif x > y:
@@ -176,21 +178,23 @@ def path_order (x, y):
return 0
def identify(string):
- return string
+ return string
# Uses Property
def refine (properties, requirements):
- """ Refines 'properties' by overriding any non-free properties
- for which a different value is specified in 'requirements'.
+ """ Refines 'properties' by overriding any non-free properties
+ for which a different value is specified in 'requirements'.
Conditional requirements are just added without modification.
Returns the resulting list of properties.
"""
+ assert is_iterable_typed(properties, Property)
+ assert is_iterable_typed(requirements, Property)
# The result has no duplicates, so we store it in a set
result = set()
-
+
# Records all requirements.
required = {}
-
+
# All the elements of requirements should be present in the result
# Record them so that we can handle 'properties'.
for r in requirements:
@@ -224,14 +228,14 @@ def translate_paths (properties, path):
if p.feature().path():
values = __re_two_ampersands.split(p.value())
-
+
new_value = "&&".join(os.path.join(path, v) for v in values)
if new_value != p.value():
result.append(Property(p.feature(), new_value, p.condition()))
else:
result.append(p)
-
+
else:
result.append (p)
@@ -242,6 +246,8 @@ def translate_indirect(properties, context_module):
names of rules, used in 'context-module'. Such rules can be
either local to the module or global. Qualified local rules
with the name of the module."""
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(context_module, basestring)
result = []
for p in properties:
if p.value()[0] == '@':
@@ -257,15 +263,14 @@ def validate (properties):
""" Exit with error if any of the properties is not valid.
properties may be a single property or a sequence of properties.
"""
-
- if isinstance (properties, str):
- __validate1 (properties)
- else:
- for p in properties:
- __validate1 (p)
+ if isinstance(properties, Property):
+ properties = [properties]
+ assert is_iterable_typed(properties, Property)
+ for p in properties:
+ __validate1(p)
def expand_subfeatures_in_conditions (properties):
-
+ assert is_iterable_typed(properties, Property)
result = []
for p in properties:
@@ -296,8 +301,9 @@ def split_conditional (property):
debug,gcc full.
Otherwise, returns empty string.
"""
+ assert isinstance(property, basestring)
m = __re_split_conditional.match (property)
-
+
if m:
return (m.group (1), '<' + m.group (2))
@@ -307,14 +313,18 @@ def split_conditional (property):
def select (features, properties):
""" Selects properties which correspond to any of the given features.
"""
+ assert is_iterable_typed(properties, basestring)
result = []
-
+
# add any missing angle brackets
features = add_grist (features)
return [p for p in properties if get_grist(p) in features]
def validate_property_sets (sets):
+ if __debug__:
+ from .property_set import PropertySet
+ assert is_iterable_typed(sets, PropertySet)
for s in sets:
validate(s.all())
@@ -323,6 +333,10 @@ def evaluate_conditionals_in_context (properties, context):
For those with met conditions, removes the condition. Properies
in conditions are looked up in 'context'
"""
+ if __debug__:
+ from .property_set import PropertySet
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(context, PropertySet)
base = []
conditional = []
@@ -348,8 +362,11 @@ def change (properties, feature, value = None):
given feature replaced by the given value.
If 'value' is None the feature will be removed.
"""
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(feature, basestring)
+ assert isinstance(value, (basestring, type(None)))
result = []
-
+
feature = add_grist (feature)
for p in properties:
@@ -368,7 +385,8 @@ def change (properties, feature, value = None):
def __validate1 (property):
""" Exit with error if property is not valid.
- """
+ """
+ assert isinstance(property, Property)
msg = None
if not property.feature().free():
@@ -379,7 +397,7 @@ def __validate1 (property):
# Still to port.
# Original lines are prefixed with "# "
#
-#
+#
# import utility : ungrist ;
# import sequence : unique ;
# import errors : error ;
@@ -389,8 +407,8 @@ def __validate1 (property):
# import set ;
# import path ;
# import assert ;
-#
-#
+#
+#
# rule validate-property-sets ( property-sets * )
@@ -405,7 +423,10 @@ def __validate1 (property):
def remove(attributes, properties):
"""Returns a property sets which include all the elements
in 'properties' that do not have attributes listed in 'attributes'."""
-
+ if isinstance(attributes, basestring):
+ attributes = [attributes]
+ assert is_iterable_typed(attributes, basestring)
+ assert is_iterable_typed(properties, basestring)
result = []
for e in properties:
attributes_new = feature.attributes(get_grist(e))
@@ -424,6 +445,8 @@ def remove(attributes, properties):
def take(attributes, properties):
"""Returns a property set which include all
properties in 'properties' that have any of 'attributes'."""
+ assert is_iterable_typed(attributes, basestring)
+ assert is_iterable_typed(properties, basestring)
result = []
for e in properties:
if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))):
@@ -431,7 +454,9 @@ def take(attributes, properties):
return result
def translate_dependencies(properties, project_id, location):
-
+ assert is_iterable_typed(properties, Property)
+ assert isinstance(project_id, basestring)
+ assert isinstance(location, basestring)
result = []
for p in properties:
@@ -447,10 +472,10 @@ def translate_dependencies(properties, project_id, location):
pass
else:
rooted = os.path.join(os.getcwd(), location, rooted)
-
+
result.append(Property(p.feature(), rooted + "//" + m.group(2), p.condition()))
-
- elif os.path.isabs(v):
+
+ elif os.path.isabs(v):
result.append(p)
else:
result.append(Property(p.feature(), project_id + "//" + v, p.condition()))
@@ -464,10 +489,12 @@ class PropertyMap:
def __init__ (self):
self.__properties = []
self.__values = []
-
+
def insert (self, properties, value):
""" Associate value with properties.
"""
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(value, basestring)
self.__properties.append(properties)
self.__values.append(value)
@@ -477,15 +504,18 @@ class PropertyMap:
subset has value assigned to it, return the
value for the longest subset, if it's unique.
"""
+ assert is_iterable_typed(properties, basestring)
return self.find_replace (properties)
def find_replace(self, properties, value=None):
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(value, (basestring, type(None)))
matches = []
match_ranks = []
-
+
for i in range(0, len(self.__properties)):
p = self.__properties[i]
-
+
if b2.util.set.contains (p, properties):
matches.append (i)
match_ranks.append(len(p))
@@ -499,7 +529,7 @@ class PropertyMap:
raise NoBestMatchingAlternative ()
best = best [0]
-
+
original = self.__values[best]
if value:
@@ -512,12 +542,12 @@ class PropertyMap:
# import errors : try catch ;
# import feature ;
# import feature : feature subfeature compose ;
-#
+#
# # local rules must be explicitly re-imported
# import property : path-order ;
-#
+#
# feature.prepare-test property-test-temp ;
-#
+#
# feature toolset : gcc : implicit symmetric ;
# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
# 3.0 3.0.1 3.0.2 : optional ;
@@ -526,98 +556,98 @@ class PropertyMap:
# feature optimization : on off ;
# feature variant : debug release : implicit composite symmetric ;
# feature rtti : on off : link-incompatible ;
-#
+#
# compose debug : _DEBUG off ;
# compose release : NDEBUG on ;
-#
+#
# import assert ;
# import "class" : new ;
-#
+#
# validate gcc gcc-3.0.1 : $(test-space) ;
-#
+#
# assert.result gcc off FOO
# : refine gcc off
# : FOO
# : $(test-space)
# ;
-#
+#
# assert.result gcc on
# : refine gcc off
# : on
# : $(test-space)
# ;
-#
+#
# assert.result gcc off
# : refine gcc : off : $(test-space)
# ;
-#
+#
# assert.result gcc off off:FOO
-# : refine gcc : off off:FOO
+# : refine gcc : off off:FOO
# : $(test-space)
# ;
-#
-# assert.result gcc:foo gcc:bar
-# : refine gcc:foo : gcc:bar
+#
+# assert.result gcc:foo gcc:bar
+# : refine gcc:foo : gcc:bar
# : $(test-space)
# ;
-#
+#
# assert.result MY_RELEASE
-# : evaluate-conditionals-in-context
+# : evaluate-conditionals-in-context
# release,off:MY_RELEASE
# : gcc release off
-#
+#
# ;
-#
+#
# try ;
# validate value : $(test-space) ;
# catch "Invalid property 'value': unknown feature 'feature'." ;
-#
+#
# try ;
# validate default : $(test-space) ;
# catch \"default\" is not a known value of feature ;
-#
+#
# validate WHATEVER : $(test-space) ;
-#
+#
# try ;
# validate : $(test-space) ;
# catch "Invalid property '': No value specified for feature 'rtti'." ;
-#
+#
# try ;
# validate value : $(test-space) ;
# catch "value" is not a value of an implicit feature ;
-#
-#
-# assert.result on
+#
+#
+# assert.result on
# : remove free implicit : gcc foo on : $(test-space) ;
-#
-# assert.result a
+#
+# assert.result a
# : select include : a gcc ;
-#
-# assert.result a
+#
+# assert.result a
# : select include bar : a gcc ;
-#
+#
# assert.result a gcc
# : select include : a gcc ;
-#
-# assert.result kylix a
+#
+# assert.result kylix a
# : change gcc a : kylix ;
-#
-# # Test ordinary properties
-# assert.result
-# : split-conditional gcc
+#
+# # Test ordinary properties
+# assert.result
+# : split-conditional gcc
# ;
-#
+#
# # Test properties with ":"
# assert.result
# : split-conditional FOO=A::B
# ;
-#
+#
# # Test conditional feature
# assert.result gcc,3.0 FOO
# : split-conditional gcc,3.0:FOO
# ;
-#
+#
# feature.finish-test property-test-temp ;
# }
-#
-
+#
+
diff --git a/src/build/property_set.py b/src/build/property_set.py
index 37fe46631..494a5b1b7 100644
--- a/src/build/property_set.py
+++ b/src/build/property_set.py
@@ -8,6 +8,7 @@
import hashlib
+import bjam
from b2.util.utility import *
import property, feature
import b2.build.feature
@@ -15,7 +16,7 @@ from b2.exceptions import *
from b2.build.property import get_abbreviated_paths
from b2.util.sequence import unique
from b2.util.set import difference
-from b2.util import cached, abbreviate_dashed
+from b2.util import cached, abbreviate_dashed, is_iterable_typed
from b2.manager import get_manager
@@ -36,6 +37,8 @@ def create (raw_properties = []):
""" Creates a new 'PropertySet' instance for the given raw properties,
or returns an already existing one.
"""
+ assert (is_iterable_typed(raw_properties, property.Property)
+ or is_iterable_typed(raw_properties, basestring))
# FIXME: propagate to callers.
if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property):
x = raw_properties
@@ -58,6 +61,7 @@ def create_with_validation (raw_properties):
that all properties are valid and converting implicit
properties into gristed form.
"""
+ assert is_iterable_typed(raw_properties, basestring)
properties = [property.create_from_string(s) for s in raw_properties]
property.validate(properties)
@@ -71,7 +75,9 @@ def empty ():
def create_from_user_input(raw_properties, jamfile_module, location):
"""Creates a property-set from the input given by the user, in the
context of 'jamfile-module' at 'location'"""
-
+ assert is_iterable_typed(raw_properties, basestring)
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(location, basestring)
properties = property.create_from_strings(raw_properties, True)
properties = property.translate_paths(properties, location)
properties = property.translate_indirect(properties, jamfile_module)
@@ -95,7 +101,10 @@ def refine_from_user_input(parent_requirements, specification, jamfile_module,
- project-module -- the module to which context indirect features
will be bound.
- location -- the path to which path features are relative."""
-
+ assert isinstance(parent_requirements, PropertySet)
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(jamfile_module, basestring)
+ assert isinstance(location, basestring)
if not specification:
return parent_requirements
@@ -146,7 +155,7 @@ class PropertySet:
caching whenever possible.
"""
def __init__ (self, properties = []):
-
+ assert is_iterable_typed(properties, property.Property)
raw_properties = []
for p in properties:
@@ -304,6 +313,7 @@ class PropertySet:
return self.subfeatures_
def evaluate_conditionals(self, context=None):
+ assert isinstance(context, (PropertySet, type(None)))
if not context:
context = self
@@ -410,6 +420,7 @@ class PropertySet:
""" Creates a new property set containing the properties in this one,
plus the ones of the property set passed as argument.
"""
+ assert isinstance(ps, PropertySet)
if not self.added_.has_key(ps):
self.added_[ps] = create(self.all_ + ps.all())
return self.added_[ps]
@@ -428,6 +439,7 @@ class PropertySet:
feature = feature[0]
if not isinstance(feature, b2.build.feature.Feature):
feature = b2.build.feature.get(feature)
+ assert isinstance(feature, b2.build.feature.Feature)
if not self.feature_map_:
self.feature_map_ = {}
@@ -442,9 +454,9 @@ class PropertySet:
@cached
def get_properties(self, feature):
"""Returns all contained properties associated with 'feature'"""
-
if not isinstance(feature, b2.build.feature.Feature):
feature = b2.build.feature.get(feature)
+ assert isinstance(feature, b2.build.feature.Feature)
result = []
for p in self.all_:
@@ -454,7 +466,7 @@ class PropertySet:
def __contains__(self, item):
return item in self.all_set_
-
+
def hash(p):
m = hashlib.md5()
m.update(p)
diff --git a/src/build/scanner.py b/src/build/scanner.py
index 19f1431d4..ada5d8325 100644
--- a/src/build/scanner.py
+++ b/src/build/scanner.py
@@ -1,10 +1,10 @@
# Status: ported.
# Base revision: 45462
-#
-# Copyright 2003 Dave Abrahams
-# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+#
+# Copyright 2003 Dave Abrahams
+# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Implements scanners: objects that compute implicit dependencies for
# files, such as includes in C++.
@@ -19,10 +19,10 @@
# then associated with actual targets. It is possible to use
# several scanners for a virtual-target. For example, a single source
# might be used by to compile actions, with different include paths.
-# In this case, two different actual targets will be created, each
+# In this case, two different actual targets will be created, each
# having scanner of its own.
#
-# Typically, scanners are created from target type and action's
+# Typically, scanners are created from target type and action's
# properties, using the rule 'get' in this module. Directly creating
# scanners is not recommended, because it might create many equvivalent
# but different instances, and lead in unneeded duplication of
@@ -34,6 +34,8 @@ import bjam
import os
from b2.exceptions import *
from b2.manager import get_manager
+from b2.util import is_iterable_typed
+
def reset ():
""" Clear the module state. This is mainly for testing purposes.
@@ -42,33 +44,37 @@ def reset ():
# Maps registered scanner classes to relevant properties
__scanners = {}
-
+
# A cache of scanners.
- # The key is: class_name.properties_tag, where properties_tag is the concatenation
+ # The key is: class_name.properties_tag, where properties_tag is the concatenation
# of all relevant properties, separated by '-'
__scanner_cache = {}
-
+
reset ()
def register(scanner_class, relevant_properties):
- """ Registers a new generator class, specifying a set of
+ """ Registers a new generator class, specifying a set of
properties relevant to this scanner. Ctor for that class
should have one parameter: list of properties.
"""
+ assert issubclass(scanner_class, Scanner)
+ assert isinstance(relevant_properties, basestring)
__scanners[str(scanner_class)] = relevant_properties
def registered(scanner_class):
""" Returns true iff a scanner of that class is registered
"""
return __scanners.has_key(str(scanner_class))
-
+
def get(scanner_class, properties):
""" Returns an instance of previously registered scanner
with the specified properties.
"""
+ assert issubclass(scanner_class, Scanner)
+ assert is_iterable_typed(properties, basestring)
scanner_name = str(scanner_class)
-
+
if not registered(scanner_name):
raise BaseException ("attempt to get unregisted scanner: %s" % scanner_name)
@@ -76,18 +82,18 @@ def get(scanner_class, properties):
r = property.select(relevant_properties, properties)
scanner_id = scanner_name + '.' + '-'.join(r)
-
- if not __scanner_cache.has_key(scanner_name):
- __scanner_cache[scanner_name] = scanner_class(r)
- return __scanner_cache[scanner_name]
+ if not __scanner_cache.has_key(scanner_id):
+ __scanner_cache[scanner_id] = scanner_class(r)
+
+ return __scanner_cache[scanner_id]
class Scanner:
""" Base scanner class.
"""
def __init__ (self):
pass
-
+
def pattern (self):
""" Returns a pattern to use for scanning.
"""
@@ -120,16 +126,19 @@ class CommonScanner(Scanner):
get_manager().scanners().propagate(self, matches)
class ScannerRegistry:
-
+
def __init__ (self, manager):
self.manager_ = manager
self.count_ = 0
self.exported_scanners_ = {}
def install (self, scanner, target, vtarget):
- """ Installs the specified scanner on actual target 'target'.
+ """ Installs the specified scanner on actual target 'target'.
vtarget: virtual target from which 'target' was actualized.
"""
+ assert isinstance(scanner, Scanner)
+ assert isinstance(target, basestring)
+ assert isinstance(vtarget, basestring)
engine = self.manager_.engine()
engine.set_target_variable(target, "HDRSCAN", scanner.pattern())
if not self.exported_scanners_.has_key(scanner):
@@ -141,8 +150,8 @@ class ScannerRegistry:
exported_name = self.exported_scanners_[scanner]
engine.set_target_variable(target, "HDRRULE", exported_name)
-
- # scanner reflects difference in properties affecting
+
+ # scanner reflects difference in properties affecting
# binding of 'target', which will be known when processing
# includes for it, will give information on how to
# interpret quoted includes.
@@ -150,6 +159,8 @@ class ScannerRegistry:
pass
def propagate(self, scanner, targets):
+ assert isinstance(scanner, Scanner)
+ assert is_iterable_typed(targets, basestring) or isinstance(targets, basestring)
engine = self.manager_.engine()
engine.set_target_variable(targets, "HDRSCAN", scanner.pattern())
engine.set_target_variable(targets, "HDRRULE",
diff --git a/src/build/targets.jam b/src/build/targets.jam
index 44c8fc9e4..2cfe08e05 100644
--- a/src/build/targets.jam
+++ b/src/build/targets.jam
@@ -336,6 +336,23 @@ class project-target : abstract-target
created. : in project [ full-name ] ;
}
self.alternatives += $(target-instance) ;
+ if ! ( [ $(target-instance).name ] in $(self.alternative-names) )
+ {
+ self.alternative-names += [ $(target-instance).name ] ;
+ }
+ }
+
+ # Checks if an alternative was declared for the target.
+ # Unlike checking for a main target this does not require
+ # building the main targets. And hence can be used in/directly
+ # while loading a project.
+ #
+ rule has-alternative-for-target ( target-name )
+ {
+ if $(target-name) in $(self.alternative-names)
+ {
+ return 1 ;
+ }
}
# Returns a 'main-target' class instance corresponding to 'name'.
diff --git a/src/build/targets.py b/src/build/targets.py
index acf10e4fd..043d90666 100644
--- a/src/build/targets.py
+++ b/src/build/targets.py
@@ -10,10 +10,10 @@
# Supports 'abstract' targets, which are targets explicitly defined in Jamfile.
#
-# Abstract targets are represented by classes derived from 'AbstractTarget' class.
+# Abstract targets are represented by classes derived from 'AbstractTarget' class.
# The first abstract target is 'project_target', which is created for each
# Jamfile, and can be obtained by the 'target' rule in the Jamfile's module.
-# (see project.jam).
+# (see project.jam).
#
# Project targets keep a list of 'MainTarget' instances.
# A main target is what the user explicitly defines in a Jamfile. It is
@@ -36,34 +36,34 @@
# |AbstractTarget |
# +========================+
# |name |
-# |project |
-# | |
-# |generate(properties) = 0|
-# +-----------+------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# +------------------------+------+------------------------------+
-# | | |
-# | | |
-# +----------+-----------+ +------+------+ +------+-------+
-# | project_target | | MainTarget | | BasicTarget |
-# +======================+ 1 * +=============+ alternatives +==============+
-# | generate(properties) |o-----------+ generate |<>------------->| generate |
+# |project |
+# | |
+# |generate(properties) = 0|
+# +-----------+------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# +------------------------+------+------------------------------+
+# | | |
+# | | |
+# +----------+-----------+ +------+------+ +------+-------+
+# | project_target | | MainTarget | | BasicTarget |
+# +======================+ 1 * +=============+ alternatives +==============+
+# | generate(properties) |o-----------+ generate |<>------------->| generate |
# | main-target | +-------------+ | construct = 0|
-# +----------------------+ +--------------+
-# |
-# ^
-# / \
-# +-+-+
-# |
-# |
-# ...--+----------------+------------------+----------------+---+
-# | | | |
-# | | | |
+# +----------------------+ +--------------+
+# |
+# ^
+# / \
+# +-+-+
+# |
+# |
+# ...--+----------------+------------------+----------------+---+
+# | | | |
+# | | | |
# ... ---+-----+ +------+-------+ +------+------+ +--------+-----+
# | | TypedTarget | | make-target | | stage-target |
# . +==============+ +=============+ +==============+
@@ -81,7 +81,7 @@ import property, project, virtual_target, property_set, feature, generators, too
from virtual_target import Subvariant
from b2.exceptions import *
from b2.util.sequence import unique
-from b2.util import path, bjam_signature
+from b2.util import path, bjam_signature, safe_isinstance, is_iterable_typed
from b2.build.errors import user_error_checkpoint
import b2.build.build_request as build_request
@@ -90,7 +90,7 @@ import b2.util.set
_re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$')
class TargetRegistry:
-
+
def __init__ (self):
# All targets that are currently being built.
# Only the key is id (target), the value is the actual object.
@@ -107,6 +107,7 @@ class TargetRegistry:
""" Registers the specified target as a main target alternatives.
Returns 'target'.
"""
+ assert isinstance(target, AbstractTarget)
target.project ().add_alternative (target)
return target
@@ -116,12 +117,15 @@ class TargetRegistry:
as main target instances, and the name of such targets are adjusted to
be '__'. Such renaming
is disabled is non-empty value is passed for 'no-renaming' parameter."""
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(main_target_name, basestring)
+ assert isinstance(no_renaming, (int, bool))
result = []
for t in sources:
t = b2.util.jam_to_value_maybe(t)
-
+
if isinstance (t, AbstractTarget):
name = t.name ()
@@ -131,7 +135,7 @@ class TargetRegistry:
# Inline targets are not built by default.
p = t.project()
- p.mark_targets_as_explicit([name])
+ p.mark_targets_as_explicit([name])
result.append(name)
else:
@@ -145,11 +149,12 @@ class TargetRegistry:
which are obtained by
- translating all specified property paths, and
- refining project requirements with the one specified for the target
-
+
'specification' are the properties xplicitly specified for a
main target
'project' is the project where the main taret is to be declared."""
-
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
specification.extend(toolset.requirements())
requirements = property_set.refine_from_user_input(
@@ -166,6 +171,8 @@ class TargetRegistry:
specification: Use-properties explicitly specified for a main target
project: Project where the main target is to be declared
"""
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
project_usage_requirements = project.get ('usage-requirements')
# We don't use 'refine-from-user-input' because I'm not sure if:
@@ -174,7 +181,7 @@ class TargetRegistry:
# are always free.
usage_requirements = property_set.create_from_user_input(
specification, project.project_module(), project.get("location"))
-
+
return project_usage_requirements.add (usage_requirements)
def main_target_default_build (self, specification, project):
@@ -184,6 +191,8 @@ class TargetRegistry:
specification: Default build explicitly specified for a main target
project: Project where the main target is to be declared
"""
+ assert is_iterable_typed(specification, basestring)
+ assert isinstance(project, ProjectTarget)
if specification:
return property_set.create_with_validation(specification)
else:
@@ -192,16 +201,18 @@ class TargetRegistry:
def start_building (self, main_target_instance):
""" Helper rules to detect cycles in main target references.
"""
+ assert isinstance(main_target_instance, MainTarget)
if self.targets_being_built_.has_key(id(main_target_instance)):
names = []
for t in self.targets_being_built_.values() + [main_target_instance]:
names.append (t.full_name())
-
+
get_manager().errors()("Recursion in main target references\n")
-
+
self.targets_being_built_[id(main_target_instance)] = main_target_instance
def end_building (self, main_target_instance):
+ assert isinstance(main_target_instance, MainTarget)
assert (self.targets_being_built_.has_key (id (main_target_instance)))
del self.targets_being_built_ [id (main_target_instance)]
@@ -211,6 +222,11 @@ class TargetRegistry:
'usage_requirements' are assumed to be in the form specified
by the user in Jamfile corresponding to 'project'.
"""
+ assert isinstance(type, basestring)
+ assert isinstance(project, ProjectTarget)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
return self.main_target_alternative (TypedTarget (name, project, type,
self.main_target_sources (sources, name),
self.main_target_requirements (requirements, project),
@@ -231,6 +247,7 @@ class TargetRegistry:
print self.indent_ + message
def push_target(self, target):
+ assert isinstance(target, AbstractTarget)
self.targets_.append(target)
def pop_target(self):
@@ -241,14 +258,15 @@ class TargetRegistry:
class GenerateResult:
-
+
def __init__ (self, ur=None, targets=None):
if not targets:
targets = []
-
+ assert isinstance(ur, property_set.PropertySet) or ur is None
+ assert is_iterable_typed(targets, virtual_target.VirtualTarget)
+
self.__usage_requirements = ur
self.__targets = targets
- assert all(isinstance(t, virtual_target.VirtualTarget) for t in targets)
if not self.__usage_requirements:
self.__usage_requirements = property_set.empty ()
@@ -258,10 +276,10 @@ class GenerateResult:
def targets (self):
return self.__targets
-
+
def extend (self, other):
assert (isinstance (other, GenerateResult))
-
+
self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ())
self.__targets.extend (other.targets ())
@@ -274,12 +292,13 @@ class AbstractTarget:
project: the project target to which this one belongs
manager:the manager object. If none, uses project.manager ()
"""
+ assert isinstance(name, basestring)
assert (isinstance (project, ProjectTarget))
# Note: it might seem that we don't need either name or project at all.
# However, there are places where we really need it. One example is error
# messages which should name problematic targets. Another is setting correct
# paths for sources and generated files.
-
+
# Why allow manager to be specified? Because otherwise project target could not derive
# from this class.
if manager:
@@ -288,47 +307,48 @@ class AbstractTarget:
self.manager_ = project.manager ()
self.name_ = name
- self.project_ = project
-
+ self.project_ = project
+
def manager (self):
return self.manager_
-
+
def name (self):
""" Returns the name of this target.
"""
return self.name_
-
+
def project (self):
""" Returns the project for this target.
"""
return self.project_
-
+
def location (self):
""" Return the location where the target was declared.
"""
return self.location_
-
+
def full_name (self):
""" Returns a user-readable name for this target.
"""
location = self.project ().get ('location')
return location + '/' + self.name_
-
+
def generate (self, property_set):
""" Takes a property set. Generates virtual targets for this abstract
target, using the specified properties, unless a different value of some
- feature is required by the target.
+ feature is required by the target.
On success, returns a GenerateResult instance with:
- a property_set with the usage requirements to be
- applied to dependents
+ applied to dependents
- a list of produced virtual targets, which may be
- empty.
+ empty.
If 'property_set' is empty, performs default build of this
target, in a way specific to derived class.
"""
raise BaseException ("method should be defined in derived classes")
-
+
def rename (self, new_name):
+ assert isinstance(new_name, basestring)
self.name_ = new_name
class ProjectTarget (AbstractTarget):
@@ -346,28 +366,32 @@ class ProjectTarget (AbstractTarget):
all alternatives are enumerated an main targets are created.
"""
def __init__ (self, manager, name, project_module, parent_project, requirements, default_build):
+ assert isinstance(project_module, basestring)
+ assert isinstance(parent_project, (ProjectTarget, type(None)))
+ assert isinstance(requirements, (type(None), property_set.PropertySet))
+ assert isinstance(default_build, (type(None), property_set.PropertySet))
AbstractTarget.__init__ (self, name, self, manager)
-
+
self.project_module_ = project_module
self.location_ = manager.projects().attribute (project_module, 'location')
self.requirements_ = requirements
self.default_build_ = default_build
-
+
self.build_dir_ = None
-
+
# A cache of IDs
self.ids_cache_ = {}
-
+
# True is main targets have already been built.
self.built_main_targets_ = False
-
+
# A list of the registered alternatives for this project.
self.alternatives_ = []
# A map from main target name to the target corresponding
# to it.
self.main_target_ = {}
-
+
# Targets marked as explicit.
self.explicit_targets_ = set()
@@ -388,8 +412,9 @@ class ProjectTarget (AbstractTarget):
# way to make 'make' work without this method.
def project_module (self):
return self.project_module_
-
+
def get (self, attribute):
+ assert isinstance(attribute, basestring)
return self.manager().projects().attribute(
self.project_module_, attribute)
@@ -404,16 +429,17 @@ class ProjectTarget (AbstractTarget):
def generate (self, ps):
""" Generates all possible targets contained in this project.
"""
+ assert isinstance(ps, property_set.PropertySet)
self.manager_.targets().log(
"Building project '%s' with '%s'" % (self.name (), str(ps)))
self.manager_.targets().increase_indent ()
-
+
result = GenerateResult ()
-
+
for t in self.targets_to_build ():
g = t.generate (ps)
result.extend (g)
-
+
self.manager_.targets().decrease_indent ()
return result
@@ -422,10 +448,10 @@ class ProjectTarget (AbstractTarget):
must be built when this project is built.
"""
result = []
-
+
if not self.built_main_targets_:
self.build_main_targets ()
-
+
# Collect all main targets here, except for "explicit" ones.
for n, t in self.main_target_.iteritems ():
if not t.name () in self.explicit_targets_:
@@ -435,29 +461,33 @@ class ProjectTarget (AbstractTarget):
self_location = self.get ('location')
for pn in self.get ('projects-to-build'):
result.append (self.find(pn + "/"))
-
+
return result
def mark_targets_as_explicit (self, target_names):
"""Add 'target' to the list of targets in this project
that should be build only by explicit request."""
-
+
# Record the name of the target, not instance, since this
# rule is called before main target instaces are created.
+ assert is_iterable_typed(target_names, basestring)
self.explicit_targets_.update(target_names)
def mark_targets_as_always(self, target_names):
+ assert is_iterable_typed(target_names, basestring)
self.always_targets_.update(target_names)
-
+
def add_alternative (self, target_instance):
""" Add new target alternative.
"""
+ assert isinstance(target_instance, AbstractTarget)
if self.built_main_targets_:
raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ())
self.alternatives_.append (target_instance)
def main_target (self, name):
+ assert isinstance(name, basestring)
if not self.built_main_targets_:
self.build_main_targets()
@@ -465,17 +495,19 @@ class ProjectTarget (AbstractTarget):
def has_main_target (self, name):
"""Tells if a main target with the specified name exists."""
+ assert isinstance(name, basestring)
if not self.built_main_targets_:
self.build_main_targets()
return self.main_target_.has_key(name)
-
+
def create_main_target (self, name):
""" Returns a 'MainTarget' class instance corresponding to the 'name'.
"""
+ assert isinstance(name, basestring)
if not self.built_main_targets_:
self.build_main_targets ()
-
+
return self.main_targets_.get (name, None)
@@ -483,7 +515,9 @@ class ProjectTarget (AbstractTarget):
""" Find and return the target with the specified id, treated
relative to self.
"""
- result = None
+ assert isinstance(id, basestring)
+
+ result = None
current_location = self.get ('location')
__re_split_project_target = re.compile (r'(.*)//(.*)')
@@ -497,13 +531,13 @@ class ProjectTarget (AbstractTarget):
target_part = split.group (2)
project_registry = self.project_.manager ().projects ()
-
+
extra_error_message = ''
if project_part:
# There's explicit project part in id. Looks up the
# project and pass the request to it.
pm = project_registry.find (project_part, current_location)
-
+
if pm:
project_target = project_registry.target (pm)
result = project_target.find (target_part, no_error=1)
@@ -520,7 +554,7 @@ class ProjectTarget (AbstractTarget):
#
# After first build we'll have target 'test' in Jamfile and file
# 'test' on the disk. We need target to override the file.
-
+
result = None
if self.has_main_target(id):
result = self.main_target(id)
@@ -531,19 +565,21 @@ class ProjectTarget (AbstractTarget):
# File actually does not exist.
# Reset 'target' so that an error is issued.
result = None
-
+
if not result:
# Interpret id as project-id
project_module = project_registry.find (id, current_location)
if project_module:
result = project_registry.target (project_module)
-
+
return result
def find (self, id, no_error = False):
+ assert isinstance(id, basestring)
+ assert isinstance(no_error, int) # also matches bools
v = self.ids_cache_.get (id, None)
-
+
if not v:
v = self.find_really (id)
self.ids_cache_ [id] = v
@@ -553,10 +589,10 @@ class ProjectTarget (AbstractTarget):
raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location')))
-
+
def build_main_targets (self):
self.built_main_targets_ = True
-
+
for a in self.alternatives_:
name = a.name ()
if not self.main_target_.has_key (name):
@@ -565,7 +601,7 @@ class ProjectTarget (AbstractTarget):
if name in self.always_targets_:
a.always()
-
+
self.main_target_ [name].add_alternative (a)
def add_constant(self, name, value, path=0):
@@ -576,17 +612,19 @@ class ProjectTarget (AbstractTarget):
the constant will be interpreted relatively
to the location of project.
"""
-
+ assert isinstance(name, basestring)
+ assert isinstance(value, basestring)
+ assert isinstance(path, int) # will also match bools
if path:
l = self.location_
if not l:
- # Project corresponding to config files do not have
+ # Project corresponding to config files do not have
# 'location' attribute, but do have source location.
# It might be more reasonable to make every project have
# a location and use some other approach to prevent buildable
# targets in config files, but that's for later.
- l = get('source-location')
-
+ l = self.get('source-location')
+
value = os.path.join(l, value)
# Now make the value absolute path. Constants should be in
# platform-native form.
@@ -596,12 +634,13 @@ class ProjectTarget (AbstractTarget):
bjam.call("set-variable", self.project_module(), name, value)
def inherit(self, parent_project):
+ assert isinstance(parent_project, ProjectTarget)
for c in parent_project.constants_:
# No need to pass the type. Path constants were converted to
# absolute paths already by parent.
self.add_constant(c, parent_project.constants_[c])
-
- # Import rules from parent
+
+ # Import rules from parent
this_module = self.project_module()
parent_module = parent_project.project_module()
@@ -612,20 +651,21 @@ class ProjectTarget (AbstractTarget):
if x not in self.manager().projects().project_rules().all_names()]
if user_rules:
bjam.call("import-rules-from-parent", parent_module, this_module, user_rules)
-
+
class MainTarget (AbstractTarget):
""" A named top-level target in Jamfile.
"""
def __init__ (self, name, project):
- AbstractTarget.__init__ (self, name, project)
+ AbstractTarget.__init__ (self, name, project)
self.alternatives_ = []
self.default_build_ = property_set.empty ()
-
+
def add_alternative (self, target):
""" Add a new alternative for this target.
"""
+ assert isinstance(target, AbstractTarget)
d = target.default_build ()
-
+
if self.alternatives_ and self.default_build_ != d:
get_manager().errors()("default build must be identical in all alternatives\n"
"main target is '%s'\n"
@@ -637,7 +677,7 @@ class MainTarget (AbstractTarget):
self.alternatives_.append (target)
- def __select_alternatives (self, property_set, debug):
+ def __select_alternatives (self, property_set_, debug):
""" Returns the best viable alternative for this property_set
See the documentation for selection rules.
# TODO: shouldn't this be 'alternative' (singular)?
@@ -647,14 +687,17 @@ class MainTarget (AbstractTarget):
# lib l : l.cpp : debug ;
# lib l : l_opt.cpp : release ;
# won't work unless we add default value debug.
- property_set = property_set.add_defaults ()
-
+ assert isinstance(property_set_, property_set.PropertySet)
+ assert isinstance(debug, int) # also matches bools
+
+ property_set_ = property_set_.add_defaults ()
+
# The algorithm: we keep the current best viable alternative.
# When we've got new best viable alternative, we compare it
- # with the current one.
+ # with the current one.
best = None
best_properties = None
-
+
if len (self.alternatives_) == 0:
return None
@@ -662,11 +705,11 @@ class MainTarget (AbstractTarget):
return self.alternatives_ [0]
if debug:
- print "Property set for selection:", property_set
+ print "Property set for selection:", property_set_
for v in self.alternatives_:
- properties = v.match (property_set, debug)
-
+ properties = v.match (property_set_, debug)
+
if properties is not None:
if not best:
best = v
@@ -689,8 +732,9 @@ class MainTarget (AbstractTarget):
return best
- def apply_default_build (self, property_set):
- return apply_default_build(property_set, self.default_build_)
+ def apply_default_build (self, property_set_):
+ assert isinstance(property_set_, property_set.PropertySet)
+ return apply_default_build(property_set_, self.default_build_)
def generate (self, ps):
""" Select an alternative for this main target, by finding all alternatives
@@ -698,23 +742,24 @@ class MainTarget (AbstractTarget):
longest requirements set.
Returns the result of calling 'generate' on that alternative.
"""
+ assert isinstance(ps, property_set.PropertySet)
self.manager_.targets ().start_building (self)
# We want composite properties in build request act as if
# all the properties it expands too are explicitly specified.
ps = ps.expand ()
-
+
all_property_sets = self.apply_default_build (ps)
result = GenerateResult ()
-
+
for p in all_property_sets:
result.extend (self.__generate_really (p))
self.manager_.targets ().end_building (self)
return result
-
+
def __generate_really (self, prop_set):
""" Generates the main target with the given property set
and returns a list which first element is property_set object
@@ -722,6 +767,7 @@ class MainTarget (AbstractTarget):
generated virtual target in other elements. It's possible
that no targets are generated.
"""
+ assert isinstance(prop_set, property_set.PropertySet)
best_alternative = self.__select_alternatives (prop_set, debug=0)
if not best_alternative:
@@ -732,24 +778,25 @@ class MainTarget (AbstractTarget):
% (self.full_name(),))
result = best_alternative.generate (prop_set)
-
+
# Now return virtual targets for the only alternative
return result
-
+
def rename(self, new_name):
+ assert isinstance(new_name, basestring)
AbstractTarget.rename(self, new_name)
for a in self.alternatives_:
a.rename(new_name)
class FileReference (AbstractTarget):
""" Abstract target which refers to a source file.
- This is artificial creature; it's usefull so that sources to
+ This is artificial creature; it's usefull so that sources to
a target can be represented as list of abstract target instances.
"""
def __init__ (self, manager, file, project):
AbstractTarget.__init__ (self, file, project)
self.file_location_ = None
-
+
def generate (self, properties):
return GenerateResult (None, [
self.manager_.virtual_targets ().from_file (
@@ -767,7 +814,7 @@ class FileReference (AbstractTarget):
# Returns the location of target. Needed by 'testing.jam'
if not self.file_location_:
source_location = self.project_.get('source-location')
-
+
for src_dir in source_location:
location = os.path.join(src_dir, self.name())
if os.path.isfile(location):
@@ -783,24 +830,26 @@ def resolve_reference(target_reference, project):
as properties explicitly specified for this reference.
"""
# Separate target name from properties override
+ assert isinstance(target_reference, basestring)
+ assert isinstance(project, ProjectTarget)
split = _re_separate_target_from_properties.match (target_reference)
if not split:
raise BaseException ("Invalid reference: '%s'" % target_reference)
-
+
id = split.group (1)
-
+
sproperties = []
-
+
if split.group (3):
sproperties = property.create_from_strings(feature.split(split.group(3)))
sproperties = feature.expand_composites(sproperties)
-
+
# Find the target
target = project.find (id)
-
+
return (target, property_set.create(sproperties))
-def generate_from_reference(target_reference, project, property_set):
+def generate_from_reference(target_reference, project, property_set_):
""" Attempts to generate the target given by target reference, which
can refer both to a main target or to a file.
Returns a list consisting of
@@ -810,13 +859,16 @@ def generate_from_reference(target_reference, project, property_set):
project: Project where the reference is made
property_set: Properties of the main target that makes the reference
"""
+ assert isinstance(target_reference, basestring)
+ assert isinstance(project, ProjectTarget)
+ assert isinstance(property_set_, property_set.PropertySet)
target, sproperties = resolve_reference(target_reference, project)
-
+
# Take properties which should be propagated and refine them
# with source-specific requirements.
- propagated = property_set.propagated()
+ propagated = property_set_.propagated()
rproperties = propagated.refine(sproperties)
-
+
return target.generate(rproperties)
@@ -828,14 +880,18 @@ class BasicTarget (AbstractTarget):
targets.
"""
def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None):
+ assert is_iterable_typed(sources, basestring)
+ assert isinstance(requirements, property_set.PropertySet)
+ assert isinstance(default_build, property_set.PropertySet)
+ assert isinstance(usage_requirements, property_set.PropertySet)
AbstractTarget.__init__ (self, name, project)
-
+
for s in sources:
if get_grist (s):
raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name))
-
+
self.sources_ = sources
-
+
if not requirements: requirements = property_set.empty ()
self.requirements_ = requirements
@@ -844,13 +900,13 @@ class BasicTarget (AbstractTarget):
if not usage_requirements: usage_requirements = property_set.empty ()
self.usage_requirements_ = usage_requirements
-
+
# A cache for resolved references
self.source_targets_ = None
-
+
# A cache for generated targets
self.generated_ = {}
-
+
# A cache for build requests
self.request_cache = {}
@@ -865,12 +921,12 @@ class BasicTarget (AbstractTarget):
def always(self):
self.always_ = True
-
+
def sources (self):
""" Returns the list of AbstractTargets which are used as sources.
The extra properties specified for sources are not represented.
The only used of this rule at the moment is the '--dump-tests'
- feature of the test system.
+ feature of the test system.
"""
if self.source_targets_ == None:
self.source_targets_ = []
@@ -881,7 +937,7 @@ class BasicTarget (AbstractTarget):
def requirements (self):
return self.requirements_
-
+
def default_build (self):
return self.default_build_
@@ -892,8 +948,10 @@ class BasicTarget (AbstractTarget):
"""
# For optimization, we add free unconditional requirements directly,
# without using complex algorithsm.
- # This gives the complex algorithm better chance of caching results.
+ # This gives the complex algorithm better chance of caching results.
# The exact effect of this "optimization" is no longer clear
+ assert isinstance(build_request, property_set.PropertySet)
+ assert isinstance(requirements, property_set.PropertySet)
free_unconditional = []
other = []
for p in requirements.all():
@@ -902,7 +960,7 @@ class BasicTarget (AbstractTarget):
else:
other.append(p)
other = property_set.create(other)
-
+
key = (build_request, other)
if not self.request_cache.has_key(key):
self.request_cache[key] = self.__common_properties2 (build_request, other)
@@ -910,8 +968,8 @@ class BasicTarget (AbstractTarget):
return self.request_cache[key].add_raw(free_unconditional)
# Given 'context' -- a set of already present properties, and 'requirements',
- # decide which extra properties should be applied to 'context'.
- # For conditional requirements, this means evaluating condition. For
+ # decide which extra properties should be applied to 'context'.
+ # For conditional requirements, this means evaluating condition. For
# indirect conditional requirements, this means calling a rule. Ordinary
# requirements are always applied.
#
@@ -920,20 +978,23 @@ class BasicTarget (AbstractTarget):
#
# gcc:release release:RELEASE
#
- # If 'what' is 'refined' returns context refined with new requirements.
+ # If 'what' is 'refined' returns context refined with new requirements.
# If 'what' is 'added' returns just the requirements that must be applied.
def evaluate_requirements(self, requirements, context, what):
- # Apply non-conditional requirements.
- # It's possible that that further conditional requirement change
+ # Apply non-conditional requirements.
+ # It's possible that that further conditional requirement change
# a value set by non-conditional requirements. For example:
#
# exe a : a.cpp : single foo:multi ;
- #
+ #
# I'm not sure if this should be an error, or not, especially given that
#
- # single
+ # single
#
# might come from project's requirements.
+ assert isinstance(requirements, property_set.PropertySet)
+ assert isinstance(context, property_set.PropertySet)
+ assert isinstance(what, basestring)
unconditional = feature.expand(requirements.non_conditional())
context = context.refine(property_set.create(unconditional))
@@ -941,7 +1002,7 @@ class BasicTarget (AbstractTarget):
# We've collected properties that surely must be present in common
# properties. We now try to figure out what other properties
# should be added in order to satisfy rules (4)-(6) from the docs.
-
+
conditionals = property_set.create(requirements.conditional())
# It's supposed that #conditionals iterations
@@ -949,34 +1010,38 @@ class BasicTarget (AbstractTarget):
# direction.
max_iterations = len(conditionals.all()) +\
len(requirements.get("")) + 1
-
+
added_requirements = []
current = context
-
+
# It's assumed that ordinary conditional requirements can't add
# properties, and that rules referred
- # by properties can't add new
+ # by properties can't add new
# properties. So the list of indirect conditionals
# does not change.
indirect = requirements.get("")
-
+
ok = 0
for i in range(0, max_iterations):
e = conditionals.evaluate_conditionals(current).all()[:]
-
+
# Evaluate indirect conditionals.
for i in indirect:
+ new = None
i = b2.util.jam_to_value_maybe(i)
if callable(i):
# This is Python callable, yeah.
- e.extend(i(current))
+ new = i(current)
else:
# Name of bjam function. Because bjam is unable to handle
# list of Property, pass list of strings.
br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()])
if br:
- e.extend(property.create_from_strings(br))
+ new = property.create_from_strings(br)
+ if new:
+ new = property.translate_paths(new, self.project().location())
+ e.extend(new)
if e == added_requirements:
# If we got the same result, we've found final properties.
@@ -994,7 +1059,7 @@ class BasicTarget (AbstractTarget):
self.manager().errors()("Can't evaluate conditional properties "
+ str(conditionals))
-
+
if what == "added":
return property_set.create(unconditional + added_requirements)
elif what == "refined":
@@ -1009,57 +1074,62 @@ class BasicTarget (AbstractTarget):
# and expands to bar2, but default value of is not bar2,
# in which case it's not clear what to do.
#
+ assert isinstance(build_request, property_set.PropertySet)
+ assert isinstance(requirements, property_set.PropertySet)
build_request = build_request.add_defaults()
# Featured added by 'add-default' can be composite and expand
# to features without default values -- so they are not added yet.
# It could be clearer/faster to expand only newly added properties
# but that's not critical.
build_request = build_request.expand()
-
+
return self.evaluate_requirements(requirements, build_request,
"refined")
-
- def match (self, property_set, debug):
+
+ def match (self, property_set_, debug):
""" Returns the alternative condition for this alternative, if
the condition is satisfied by 'property_set'.
"""
# The condition is composed of all base non-conditional properties.
# It's not clear if we should expand 'self.requirements_' or not.
# For one thing, it would be nice to be able to put
- # msvc-6.0
+ # msvc-6.0
# in requirements.
- # On the other hand, if we have release in condition it
+ # On the other hand, if we have release in condition it
# does not make sense to require full to be in
# build request just to select this variant.
+ assert isinstance(property_set_, property_set.PropertySet)
bcondition = self.requirements_.base ()
ccondition = self.requirements_.conditional ()
condition = b2.util.set.difference (bcondition, ccondition)
if debug:
print " next alternative: required properties:", [str(p) for p in condition]
-
- if b2.util.set.contains (condition, property_set.all()):
+
+ if b2.util.set.contains (condition, property_set_.all()):
if debug:
print " matched"
-
+
return condition
else:
return None
- def generate_dependency_targets (self, target_ids, property_set):
+ def generate_dependency_targets (self, target_ids, property_set_):
+ assert is_iterable_typed(target_ids, basestring)
+ assert isinstance(property_set_, property_set.PropertySet)
targets = []
usage_requirements = []
for id in target_ids:
-
- result = generate_from_reference(id, self.project_, property_set)
+
+ result = generate_from_reference(id, self.project_, property_set_)
targets += result.targets()
usage_requirements += result.usage_requirements().all()
- return (targets, usage_requirements)
-
+ return (targets, usage_requirements)
+
def generate_dependency_properties(self, properties, ps):
""" Takes a target reference, which might be either target id
or a dependency property, and generates that target using
@@ -1067,20 +1137,22 @@ class BasicTarget (AbstractTarget):
Returns a tuple (result, usage_requirements).
"""
+ assert is_iterable_typed(properties, property.Property)
+ assert isinstance(ps, property_set.PropertySet)
result_properties = []
usage_requirements = []
for p in properties:
-
+
result = generate_from_reference(p.value(), self.project_, ps)
for t in result.targets():
result_properties.append(property.Property(p.feature(), t))
-
+
usage_requirements += result.usage_requirements().all()
- return (result_properties, usage_requirements)
+ return (result_properties, usage_requirements)
+
-
@user_error_checkpoint
@@ -1089,9 +1161,10 @@ class BasicTarget (AbstractTarget):
and calls 'construct'. This method should not be
overridden.
"""
+ assert isinstance(ps, property_set.PropertySet)
self.manager_.errors().push_user_context(
"Generating target " + self.full_name(), self.user_context_)
-
+
if self.manager().targets().logging():
self.manager().targets().log(
"Building target '%s'" % self.name_)
@@ -1100,26 +1173,26 @@ class BasicTarget (AbstractTarget):
"Build request: '%s'" % str (ps.raw ()))
cf = self.manager().command_line_free_features()
self.manager().targets().log(
- "Command line free features: '%s'" % str (cf.raw ()))
+ "Command line free features: '%s'" % str (cf.raw ()))
self.manager().targets().log(
"Target requirements: %s'" % str (self.requirements().raw ()))
-
+
self.manager().targets().push_target(self)
if not self.generated_.has_key(ps):
# Apply free features form the command line. If user
- # said
+ # said
# define=FOO
# he most likely want this define to be set for all compiles.
- ps = ps.refine(self.manager().command_line_free_features())
+ ps = ps.refine(self.manager().command_line_free_features())
rproperties = self.common_properties (ps, self.requirements_)
self.manager().targets().log(
"Common properties are '%s'" % str (rproperties))
-
+
if rproperties.get("") != ["no"]:
-
+
result = GenerateResult ()
properties = rproperties.non_dependency ()
@@ -1142,9 +1215,9 @@ class BasicTarget (AbstractTarget):
self.manager_.targets().log(
"Build properties: '%s'" % str(rproperties))
-
+
source_targets += rproperties.get('')
-
+
# We might get duplicate sources, for example if
# we link to two library which have the same in
# usage requirements.
@@ -1170,7 +1243,7 @@ class BasicTarget (AbstractTarget):
self.manager().virtual_targets().recent_targets(), ps,
source_targets, rproperties, usage_requirements)
self.manager().virtual_targets().clear_recent_targets()
-
+
ur = self.compute_usage_requirements (s)
ur = ur.add (gur)
s.set_usage_requirements (ur)
@@ -1178,7 +1251,7 @@ class BasicTarget (AbstractTarget):
self.manager_.targets().log (
"Usage requirements from '%s' are '%s'" %
(self.name(), str(rproperties)))
-
+
self.generated_[ps] = GenerateResult (ur, result)
else:
self.generated_[ps] = GenerateResult (property_set.empty(), [])
@@ -1195,7 +1268,7 @@ class BasicTarget (AbstractTarget):
# dependencies
# - it's not clear if that's a good idea anyway. The alias
# target, for example, should not fail to build if a dependency
- # fails.
+ # fails.
self.generated_[ps] = GenerateResult(
property_set.create(["no"]), [])
else:
@@ -1205,21 +1278,22 @@ class BasicTarget (AbstractTarget):
self.manager().targets().decrease_indent()
return self.generated_[ps]
-
+
def compute_usage_requirements (self, subvariant):
- """ Given the set of generated targets, and refined build
+ """ Given the set of generated targets, and refined build
properties, determines and sets appripriate usage requirements
on those targets.
"""
+ assert isinstance(subvariant, virtual_target.Subvariant)
rproperties = subvariant.build_properties ()
xusage_requirements =self.evaluate_requirements(
self.usage_requirements_, rproperties, "added")
-
+
# We generate all dependency properties and add them,
# as well as their usage requirements, to result.
(r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties)
extra = r1 + r2
-
+
result = property_set.create (xusage_requirements.non_dependency () + extra)
# Propagate usage requirements we've got from sources, except
@@ -1230,7 +1304,7 @@ class BasicTarget (AbstractTarget):
#
# pch pch1 : ...
# lib lib1 : ..... pch1 ;
- # pch pch2 :
+ # pch pch2 :
# lib lib2 : pch2 lib1 ;
#
# Here, lib2 should not get property from pch1.
@@ -1241,7 +1315,7 @@ class BasicTarget (AbstractTarget):
# features are special.
removed_pch = filter(lambda prop: prop.feature().name() not in ['', ''], subvariant.sources_usage_requirements().all())
result = result.add(property_set.PropertySet(removed_pch))
-
+
return result
def create_subvariant (self, root_targets, all_targets,
@@ -1249,23 +1323,29 @@ class BasicTarget (AbstractTarget):
rproperties, usage_requirements):
"""Creates a new subvariant-dg instances for 'targets'
- 'root-targets' the virtual targets will be returned to dependents
- - 'all-targets' all virtual
+ - 'all-targets' all virtual
targets created while building this main target
- 'build-request' is property-set instance with
requested build properties"""
-
+ assert is_iterable_typed(root_targets, virtual_target.VirtualTarget)
+ assert is_iterable_typed(all_targets, virtual_target.VirtualTarget)
+ assert isinstance(build_request, property_set.PropertySet)
+ assert is_iterable_typed(sources, virtual_target.VirtualTarget)
+ assert isinstance(rproperties, property_set.PropertySet)
+ assert isinstance(usage_requirements, property_set.PropertySet)
+
for e in root_targets:
e.root (True)
s = Subvariant (self, build_request, sources,
rproperties, usage_requirements, all_targets)
-
+
for v in all_targets:
if not v.creating_subvariant():
v.creating_subvariant(s)
-
+
return s
-
+
def construct (self, name, source_targets, properties):
""" Constructs the virtual targets for this abstract targets and
the dependecy graph. Returns a tuple consisting of the properties and the list of virtual targets.
@@ -1276,21 +1356,24 @@ class BasicTarget (AbstractTarget):
class TypedTarget (BasicTarget):
import generators
-
+
def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements):
+ assert isinstance(type, basestring)
BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements)
self.type_ = type
def __jam_repr__(self):
return b2.util.value_to_jam(self)
-
+
def type (self):
return self.type_
-
- def construct (self, name, source_targets, prop_set):
+ def construct (self, name, source_targets, prop_set):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(source_targets, virtual_target.VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
r = generators.construct (self.project_, os.path.splitext(name)[0],
- self.type_,
+ self.type_,
prop_set.add_raw(['' + self.type_]),
source_targets, True)
@@ -1303,17 +1386,19 @@ class TypedTarget (BasicTarget):
print "error: and the requested properties"
print "error: make sure you've configured the needed tools"
print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
-
+
print "To debug this problem, try the --debug-generators option."
sys.exit(1)
-
+
return r
-def apply_default_build(property_set, default_build):
+def apply_default_build(property_set_, default_build):
# 1. First, see what properties from default_build
- # are already present in property_set.
+ # are already present in property_set.
+ assert isinstance(property_set_, property_set.PropertySet)
+ assert isinstance(default_build, property_set.PropertySet)
- specified_features = set(p.feature() for p in property_set.all())
+ specified_features = set(p.feature() for p in property_set_.all())
defaults_to_apply = []
for d in default_build.all():
@@ -1341,24 +1426,29 @@ def apply_default_build(property_set, default_build):
# be an indication that
# build_request.expand-no-defaults is the wrong rule
# to use here.
- compressed = feature.compress_subproperties(property_set.all())
+ compressed = feature.compress_subproperties(property_set_.all())
result = build_request.expand_no_defaults(
b2.build.property_set.create(feature.expand([p])) for p in (compressed + defaults_to_apply))
else:
- result.append (property_set)
+ result.append (property_set_)
return result
def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements):
-
+ assert isinstance(name, basestring)
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
+
from b2.manager import get_manager
t = get_manager().targets()
-
+
project = get_manager().projects().current()
-
+
return t.main_target_alternative(
TypedTarget(name, project, type,
t.main_target_sources(sources, name),
@@ -1368,17 +1458,22 @@ def create_typed_metatarget(name, type, sources, requirements, default_build, us
def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]):
+ assert isinstance(name, basestring)
+ assert is_iterable_typed(sources, basestring)
+ assert is_iterable_typed(requirements, basestring)
+ assert is_iterable_typed(default_build, basestring)
+ assert is_iterable_typed(usage_requirements, basestring)
from b2.manager import get_manager
t = get_manager().targets()
-
+
project = get_manager().projects().current()
-
+
return t.main_target_alternative(
klass(name, project,
t.main_target_sources(sources, name),
t.main_target_requirements(requirements, project),
t.main_target_default_build(default_build, project),
- t.main_target_usage_requirements(usage_requirements, project)))
+ t.main_target_usage_requirements(usage_requirements, project)))
def metatarget_function_for_class(class_):
@@ -1390,7 +1485,7 @@ def metatarget_function_for_class(class_):
t = get_manager().targets()
project = get_manager().projects().current()
-
+
return t.main_target_alternative(
class_(name, project,
t.main_target_sources(sources, name),
diff --git a/src/build/toolset.py b/src/build/toolset.py
index e969123d4..672d18f5a 100644
--- a/src/build/toolset.py
+++ b/src/build/toolset.py
@@ -1,18 +1,20 @@
# Status: being ported by Vladimir Prus
# Base revision: 40958
#
-# Copyright 2003 Dave Abrahams
-# Copyright 2005 Rene Rivera
-# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
-# Distributed under the Boost Software License, Version 1.0.
-# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+# Copyright 2003 Dave Abrahams
+# Copyright 2005 Rene Rivera
+# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
+# Distributed under the Boost Software License, Version 1.0.
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
""" Support for toolset definition.
"""
import feature, property, generators, property_set
import b2.util.set
-from b2.util import cached, qualify_jam_action
+import bjam
+
+from b2.util import cached, qualify_jam_action, is_iterable_typed, is_iterable
from b2.util.utility import *
from b2.util import bjam_signature
from b2.manager import get_manager
@@ -22,7 +24,7 @@ __re_two_ampersands = re.compile ('(&&)')
__re_first_segment = re.compile ('([^.]*).*')
__re_first_group = re.compile (r'[^.]*\.(.*)')
-# Flag is a mechanism to set a value
+# Flag is a mechanism to set a value
# A single toolset flag. Specifies that when certain
# properties are in build property set, certain values
# should be appended to some variable.
@@ -30,13 +32,18 @@ __re_first_group = re.compile (r'[^.]*\.(.*)')
# A flag applies to a specific action in specific module.
# The list of all flags for a module is stored, and each
# flag further contains the name of the rule it applies
-# for,
+# for,
class Flag:
def __init__(self, variable_name, values, condition, rule = None):
+ assert isinstance(variable_name, basestring)
+ assert is_iterable(values) and all(
+ isinstance(v, (basestring, type(None))) for v in values)
+ assert is_iterable_typed(condition, property_set.PropertySet)
+ assert isinstance(rule, (basestring, type(None)))
self.variable_name = variable_name
self.values = values
- self.condition = condition
+ self.condition = condition
self.rule = rule
def __str__(self):
@@ -47,7 +54,7 @@ def reset ():
""" Clear the module state. This is mainly for testing purposes.
"""
global __module_flags, __flags, __stv
-
+
# Mapping from module name to a list of all flags that apply
# to either that module directly, or to any rule in that module.
# Each element of the list is Flag instance.
@@ -61,21 +68,21 @@ def reset ():
# entries for module name 'xxx', they are flags for 'xxx' itself,
# not including any rules in that module.
__flags = {}
-
+
# A cache for varaible settings. The key is generated from the rule name and the properties.
__stv = {}
-
+
reset ()
# FIXME: --ignore-toolset-requirements
def using(toolset_module, *args):
loaded_toolset_module= get_manager().projects().load_module(toolset_module, [os.getcwd()]);
loaded_toolset_module.init(*args)
-
+
# FIXME push-checking-for-flags-module ....
# FIXME: investigate existing uses of 'hack-hack' parameter
# in jam code.
-
+
@bjam_signature((["rule_or_module", "variable_name", "condition", "*"],
["values", "*"]))
def flags(rule_or_module, variable_name, condition, values = []):
@@ -84,7 +91,7 @@ def flags(rule_or_module, variable_name, condition, values = []):
rule_or_module: If contains dot, should be a rule name.
The flags will be applied when that rule is
used to set up build actions.
-
+
If does not contain dot, should be a module name.
The flags will be applied for all rules in that
module.
@@ -92,7 +99,7 @@ def flags(rule_or_module, variable_name, condition, values = []):
module, an error is issued.
variable_name: Variable that should be set on target
-
+
condition A condition when this flag should be applied.
Should be set of property sets. If one of
those property sets is contained in build
@@ -102,21 +109,25 @@ def flags(rule_or_module, variable_name, condition, values = []):
"gcc". Subfeatures, like in "gcc-3.2"
are allowed. If left empty, the flag will
always used.
-
- Propery sets may use value-less properties
- ('' vs. 'value') to match absent
+
+ Propery sets may use value-less properties
+ ('' vs. 'value') to match absent
properties. This allows to separately match
-
+
/64
ia64/
-
+
Where both features are optional. Without this
syntax we'd be forced to define "default" value.
values: The value to add to variable. If
- is specified, then the value of 'feature'
+ is specified, then the value of 'feature'
will be added.
"""
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(variable_name, basestring)
+ assert is_iterable_typed(condition, basestring)
+ assert is_iterable(values) and all(isinstance(v, (basestring, type(None))) for v in values)
caller = bjam.caller()
if not '.' in rule_or_module and caller and caller[:-1].startswith("Jamfile"):
# Unqualified rule name, used inside Jamfile. Most likely used with
@@ -129,17 +140,17 @@ def flags(rule_or_module, variable_name, condition, values = []):
# FIXME: revive checking that we don't set flags for a different
# module unintentionally
pass
-
+
if condition and not replace_grist (condition, ''):
# We have condition in the form '', that is, without
# value. That's a previous syntax:
#
# flags gcc.link RPATH ;
# for compatibility, convert it to
- # flags gcc.link RPATH : ;
+ # flags gcc.link RPATH : ;
values = [ condition ]
condition = None
-
+
if condition:
transformed = []
for c in condition:
@@ -150,14 +161,17 @@ def flags(rule_or_module, variable_name, condition, values = []):
condition = transformed
property.validate_property_sets(condition)
-
+
__add_flag (rule_or_module, variable_name, condition, values)
def set_target_variables (manager, rule_or_module, targets, ps):
"""
"""
+ assert isinstance(rule_or_module, basestring)
+ assert is_iterable_typed(targets, basestring)
+ assert isinstance(ps, property_set.PropertySet)
settings = __set_target_variables_aux(manager, rule_or_module, ps)
-
+
if settings:
for s in settings:
for target in targets:
@@ -166,7 +180,8 @@ def set_target_variables (manager, rule_or_module, targets, ps):
def find_satisfied_condition(conditions, ps):
"""Returns the first element of 'property-sets' which is a subset of
'properties', or an empty list if no such element exists."""
-
+ assert is_iterable_typed(conditions, property_set.PropertySet)
+ assert isinstance(ps, property_set.PropertySet)
features = set(p.feature() for p in ps.all())
for condition in conditions:
@@ -177,11 +192,11 @@ def find_satisfied_condition(conditions, ps):
found = False
if i.value():
found = i.value() in ps.get(i.feature())
- else:
- # Handle value-less properties like '' (compare with
+ else:
+ # Handle value-less properties like '' (compare with
# 'x86').
- # If $(i) is a value-less property it should match default
- # value of an optional property. See the first line in the
+ # If $(i) is a value-less property it should match default
+ # value of an optional property. See the first line in the
# example below:
#
# property set properties result
@@ -197,22 +212,27 @@ def find_satisfied_condition(conditions, ps):
return condition
return None
-
+
def register (toolset):
""" Registers a new toolset.
"""
+ assert isinstance(toolset, basestring)
feature.extend('toolset', [toolset])
def inherit_generators (toolset, properties, base, generators_to_ignore = []):
+ assert isinstance(toolset, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(base, basestring)
+ assert is_iterable_typed(generators_to_ignore, basestring)
if not properties:
properties = [replace_grist (toolset, '')]
-
+
base_generators = generators.generators_for_toolset(base)
-
+
for g in base_generators:
id = g.id()
-
+
if not id in generators_to_ignore:
# Some generator names have multiple periods in their name, so
# $(id:B=$(toolset)) doesn't generate the right new_id name.
@@ -232,13 +252,16 @@ def inherit_flags(toolset, base, prohibited_properties = []):
'prohibited-properties' are ignored. Don't confuse property and feature, for
example on and off, so blocking one of them does
not block the other one.
-
+
The flag conditions are not altered at all, so if a condition includes a name,
or version of a base toolset, it won't ever match the inheriting toolset. When
such flag settings must be inherited, define a rule in base toolset module and
call it as needed."""
+ assert isinstance(toolset, basestring)
+ assert isinstance(base, basestring)
+ assert is_iterable_typed(prohibited_properties, basestring)
for f in __module_flags.get(base, []):
-
+
if not f.condition or b2.util.set.difference(f.condition, prohibited_properties):
match = __re_first_group.match(f.rule)
rule_ = None
@@ -254,38 +277,20 @@ def inherit_flags(toolset, base, prohibited_properties = []):
__add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
-def inherit_rules (toolset, base):
- pass
- # FIXME: do something about this.
-# base_generators = generators.generators_for_toolset (base)
-# import action
+def inherit_rules(toolset, base):
+ engine = get_manager().engine()
+ new_actions = {}
+ for action_name, action in engine.actions.iteritems():
+ module, id = split_action_id(action_name)
+ if module == base:
+ new_action_name = toolset + '.' + id
+ # make sure not to override any existing actions
+ # that may have been declared already
+ if new_action_name not in engine.actions:
+ new_actions[new_action_name] = action
-# ids = []
-# for g in base_generators:
-# (old_toolset, id) = split_action_id (g.id ())
-# ids.append (id) ;
-
-# new_actions = []
-
-# engine = get_manager().engine()
- # FIXME: do this!
-# for action in engine.action.values():
-# pass
-# (old_toolset, id) = split_action_id(action.action_name)
-#
-# if old_toolset == base:
-# new_actions.append ((id, value [0], value [1]))
-#
-# for a in new_actions:
-# action.register (toolset + '.' + a [0], a [1], a [2])
-
- # TODO: how to deal with this?
-# IMPORT $(base) : $(rules) : $(toolset) : $(rules) : localized ;
-# # Import the rules to the global scope
-# IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
-# }
-#
+ engine.actions.update(new_actions)
######################################################################################
# Private functions
@@ -294,12 +299,14 @@ def inherit_rules (toolset, base):
def __set_target_variables_aux (manager, rule_or_module, ps):
""" Given a rule name and a property set, returns a list of tuples of
variables names and values, which must be set on targets for that
- rule/properties combination.
+ rule/properties combination.
"""
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(ps, property_set.PropertySet)
result = []
for f in __flags.get(rule_or_module, []):
-
+
if not f.condition or find_satisfied_condition (f.condition, ps):
processed = []
for v in f.values:
@@ -309,10 +316,10 @@ def __set_target_variables_aux (manager, rule_or_module, ps):
for r in processed:
result.append ((f.variable_name, r))
-
+
# strip away last dot separated part and recurse.
next = __re_split_last_segment.match(rule_or_module)
-
+
if next:
result.extend(__set_target_variables_aux(
manager, next.group(1), ps))
@@ -320,12 +327,14 @@ def __set_target_variables_aux (manager, rule_or_module, ps):
return result
def __handle_flag_value (manager, value, ps):
+ assert isinstance(value, basestring)
+ assert isinstance(ps, property_set.PropertySet)
result = []
-
+
if get_grist (value):
f = feature.get(value)
values = ps.get(f)
-
+
for value in values:
if f.dependency():
@@ -334,7 +343,7 @@ def __handle_flag_value (manager, value, ps):
result.append(value.actualize())
elif f.path() or f.free():
-
+
# Treat features with && in the value
# specially -- each &&-separated element is considered
# separate value. This is needed to handle searched
@@ -355,8 +364,13 @@ def __add_flag (rule_or_module, variable_name, condition, values):
""" Adds a new flag setting with the specified values.
Does no checking.
"""
+ assert isinstance(rule_or_module, basestring)
+ assert isinstance(variable_name, basestring)
+ assert is_iterable_typed(condition, property_set.PropertySet)
+ assert is_iterable(values) and all(
+ isinstance(v, (basestring, type(None))) for v in values)
f = Flag(variable_name, values, condition, rule_or_module)
-
+
# Grab the name of the module
m = __re_first_segment.match (rule_or_module)
assert m
@@ -377,21 +391,24 @@ def add_requirements(requirements):
will be automatically added to the requirements for all main targets, as if
they were specified literally. For best results, all requirements added should
be conditional or indirect conditional."""
-
+ assert is_iterable_typed(requirements, basestring)
+
#if ! $(.ignore-requirements)
#{
__requirements.extend(requirements)
#}
-
+
# Make toolset 'toolset', defined in a module of the same name,
# inherit from 'base'
# 1. The 'init' rule from 'base' is imported into 'toolset' with full
# name. Another 'init' is called, which forwards to the base one.
-# 2. All generators from 'base' are cloned. The ids are adjusted and
+# 2. All generators from 'base' are cloned. The ids are adjusted and
# property in requires is adjusted too
# 3. All flags are inherited
# 4. All rules are imported.
def inherit(toolset, base):
+ assert isinstance(toolset, basestring)
+ assert isinstance(base, basestring)
get_manager().projects().load_module(base, []);
inherit_generators(toolset, [], base)
diff --git a/src/build/type.py b/src/build/type.py
index e815739f4..c8d6334c7 100644
--- a/src/build/type.py
+++ b/src/build/type.py
@@ -14,7 +14,7 @@ import os.path
from b2.util.utility import replace_grist, os_name
from b2.exceptions import *
from b2.build import feature, property, scanner
-from b2.util import bjam_signature
+from b2.util import bjam_signature, is_iterable_typed
__re_hyphen = re.compile ('-')
@@ -32,17 +32,17 @@ def __register_features ():
def reset ():
""" Clear the module state. This is mainly for testing purposes.
Note that this must be called _after_ resetting the module 'feature'.
- """
+ """
global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache
-
+
__register_features ()
# Stores suffixes for generated targets.
__prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()]
-
+
# Maps suffixes to types
__suffixes_to_types = {}
-
+
# A map with all the registered types, indexed by the type name
# Each entry is a dictionary with following values:
# 'base': the name of base type or None if type has no base
@@ -52,12 +52,12 @@ def reset ():
# Caches suffixes for targets with certain properties.
__target_suffixes_cache = {}
-
+
reset ()
@bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"]))
def register (type, suffixes = [], base_type = None):
- """ Registers a target type, possibly derived from a 'base-type'.
+ """ Registers a target type, possibly derived from a 'base-type'.
If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'.
Also, the first element gives the suffix to be used when constructing and object of
'type'.
@@ -70,7 +70,7 @@ def register (type, suffixes = [], base_type = None):
# which need to be decomposed.
if __re_hyphen.search (type):
raise BaseException ('type name "%s" contains a hyphen' % type)
-
+
if __types.has_key (type):
raise BaseException ('Type "%s" is already registered.' % type)
@@ -79,7 +79,7 @@ def register (type, suffixes = [], base_type = None):
entry ['derived'] = []
entry ['scanner'] = None
__types [type] = entry
-
+
if base_type:
__types.setdefault(base_type, {}).setdefault('derived', []).append(type)
@@ -87,17 +87,17 @@ def register (type, suffixes = [], base_type = None):
# Generated targets of 'type' will use the first of 'suffixes'
# (this may be overriden)
set_generated_target_suffix (type, [], suffixes [0])
-
+
# Specify mapping from suffixes to type
register_suffixes (suffixes, type)
-
+
feature.extend('target-type', [type])
feature.extend('main-target-type', [type])
feature.extend('base-target-type', [type])
if base_type:
- feature.compose ('' + type, replace_grist (base_type, ''))
- feature.compose ('' + type, '' + base_type)
+ feature.compose ('' + type, [replace_grist (base_type, '')])
+ feature.compose ('' + type, ['' + base_type])
import b2.build.generators as generators
# Adding a new derived type affects generator selection so we need to
@@ -111,13 +111,16 @@ def register (type, suffixes = [], base_type = None):
# FIXME: quick hack.
def type_from_rule_name(rule_name):
+ assert isinstance(rule_name, basestring)
return rule_name.upper().replace("-", "_")
def register_suffixes (suffixes, type):
- """ Specifies that targets with suffix from 'suffixes' have the type 'type'.
+ """ Specifies that targets with suffix from 'suffixes' have the type 'type'.
If a different type is already specified for any of syffixes, issues an error.
"""
+ assert is_iterable_typed(suffixes, basestring)
+ assert isinstance(type, basestring)
for s in suffixes:
if __suffixes_to_types.has_key (s):
old_type = __suffixes_to_types [s]
@@ -129,40 +132,51 @@ def register_suffixes (suffixes, type):
def registered (type):
""" Returns true iff type has been registered.
"""
+ assert isinstance(type, basestring)
return __types.has_key (type)
def validate (type):
""" Issues an error if 'type' is unknown.
"""
+ assert isinstance(type, basestring)
if not registered (type):
raise BaseException ("Unknown target type '%s'" % type)
def set_scanner (type, scanner):
""" Sets a scanner class that will be used for this 'type'.
"""
+ if __debug__:
+ from .scanner import Scanner
+ assert isinstance(type, basestring)
+ assert issubclass(scanner, Scanner)
validate (type)
__types [type]['scanner'] = scanner
def get_scanner (type, prop_set):
""" Returns a scanner instance appropriate to 'type' and 'property_set'.
"""
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(prop_set, PropertySet)
if registered (type):
scanner_type = __types [type]['scanner']
if scanner_type:
return scanner.get (scanner_type, prop_set.raw ())
pass
-
+
return None
def base(type):
"""Returns a base type for the given type or nothing in case the given type is
not derived."""
-
+ assert isinstance(type, basestring)
return __types[type]['base']
def all_bases (type):
""" Returns type and all of its bases, in the order of their distance from type.
"""
+ assert isinstance(type, basestring)
result = []
while type:
result.append (type)
@@ -173,6 +187,7 @@ def all_bases (type):
def all_derived (type):
""" Returns type and all classes that derive from it, in the order of their distance from type.
"""
+ assert isinstance(type, basestring)
result = [type]
for d in __types [type]['derived']:
result.extend (all_derived (d))
@@ -182,21 +197,25 @@ def all_derived (type):
def is_derived (type, base):
""" Returns true if 'type' is 'base' or has 'base' as its direct or indirect base.
"""
+ assert isinstance(type, basestring)
+ assert isinstance(base, basestring)
# TODO: this isn't very efficient, especially for bases close to type
if base in all_bases (type):
return True
- else:
+ else:
return False
def is_subtype (type, base):
""" Same as is_derived. Should be removed.
"""
+ assert isinstance(type, basestring)
+ assert isinstance(base, basestring)
# TODO: remove this method
return is_derived (type, base)
@bjam_signature((["type"], ["properties", "*"], ["suffix"]))
def set_generated_target_suffix (type, properties, suffix):
- """ Sets a target suffix that should be used when generating target
+ """ Sets a target suffix that should be used when generating target
of 'type' with the specified properties. Can be called with
empty properties if no suffix for 'type' was specified yet.
This does not automatically specify that files 'suffix' have
@@ -208,17 +227,27 @@ def set_generated_target_suffix (type, properties, suffix):
The 'suffix' parameter can be empty string ("") to indicate that
no suffix should be used.
"""
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(suffix, basestring)
set_generated_target_ps(1, type, properties, suffix)
-
+
def change_generated_target_suffix (type, properties, suffix):
- """ Change the suffix previously registered for this type/properties
+ """ Change the suffix previously registered for this type/properties
combination. If suffix is not yet specified, sets it.
"""
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(suffix, basestring)
change_generated_target_ps(1, type, properties, suffix)
def generated_target_suffix(type, properties):
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(properties, PropertySet)
return generated_target_ps(1, type, properties)
# Sets a target prefix that should be used when generating targets of 'type'
@@ -236,16 +265,31 @@ def set_generated_target_prefix(type, properties, prefix):
# Change the prefix previously registered for this type/properties combination.
# If prefix is not yet specified, sets it.
def change_generated_target_prefix(type, properties, prefix):
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(prefix, basestring)
change_generated_target_ps(0, type, properties, prefix)
def generated_target_prefix(type, properties):
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(type, basestring)
+ assert isinstance(properties, PropertySet)
return generated_target_ps(0, type, properties)
def set_generated_target_ps(is_suffix, type, properties, val):
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(val, basestring)
properties.append ('' + type)
__prefixes_suffixes[is_suffix].insert (properties, val)
def change_generated_target_ps(is_suffix, type, properties, val):
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
+ assert isinstance(val, basestring)
properties.append ('' + type)
prev = __prefixes_suffixes[is_suffix].find_replace(properties, val)
if not prev:
@@ -256,7 +300,9 @@ def change_generated_target_ps(is_suffix, type, properties, val):
# If no prefix/suffix is specified for 'type', returns prefix/suffix for
# base type, if any.
def generated_target_ps_real(is_suffix, type, properties):
-
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(properties, basestring)
result = ''
found = False
while type and not found:
@@ -278,6 +324,11 @@ def generated_target_ps(is_suffix, type, prop_set):
with the specified properties. If not suffix were specified for
'type', returns suffix for base type, if any.
"""
+ if __debug__:
+ from .property_set import PropertySet
+ assert isinstance(is_suffix, (int, bool))
+ assert isinstance(type, basestring)
+ assert isinstance(prop_set, PropertySet)
key = (is_suffix, type, prop_set)
v = __target_suffixes_cache.get(key, None)
@@ -289,14 +340,15 @@ def generated_target_ps(is_suffix, type, prop_set):
def type(filename):
""" Returns file type given it's name. If there are several dots in filename,
- tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
+ tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and
"so" will be tried.
"""
+ assert isinstance(filename, basestring)
while 1:
filename, suffix = os.path.splitext (filename)
if not suffix: return None
suffix = suffix[1:]
-
+
if __suffixes_to_types.has_key(suffix):
return __suffixes_to_types[suffix]
@@ -306,6 +358,10 @@ def register_type (type, suffixes, base_type = None, os = []):
if os is not specified. This rule is injected into each of the type
modules for the sake of convenience.
"""
+ assert isinstance(type, basestring)
+ assert is_iterable_typed(suffixes, basestring)
+ assert isinstance(base_type, basestring) or base_type is None
+ assert is_iterable_typed(os, basestring)
if registered (type):
return
diff --git a/src/build/version.jam b/src/build/version.jam
index fa8fb3a56..8a1a957a1 100644
--- a/src/build/version.jam
+++ b/src/build/version.jam
@@ -6,13 +6,13 @@
import numbers ;
-.major = "2014" ;
-.minor = "03" ;
+.major = "2015" ;
+.minor = "07" ;
rule boost-build ( )
{
- return "$(.major).$(.minor)-svn" ;
+ return "$(.major).$(.minor)-git" ;
}
diff --git a/src/build/virtual_target.py b/src/build/virtual_target.py
index ac6703056..ea4b24d82 100644
--- a/src/build/virtual_target.py
+++ b/src/build/virtual_target.py
@@ -67,7 +67,7 @@ import os.path
import string
import types
-from b2.util import path, utility, set
+from b2.util import path, utility, set, is_iterable_typed
from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value
from b2.util.sequence import unique
from b2.tools import common
@@ -110,6 +110,7 @@ class VirtualTargetRegistry:
and equal action. If such target is found it is retured and 'target' is not registered.
Otherwise, 'target' is registered and returned.
"""
+ assert isinstance(target, VirtualTarget)
if target.path():
signature = target.path() + "-" + target.name()
else:
@@ -156,6 +157,11 @@ class VirtualTargetRegistry:
for the project, and use that path to determine if the target was already created.
TODO: passing project with all virtual targets starts to be annoying.
"""
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(file, basestring)
+ assert isinstance(file_location, basestring)
+ assert isinstance(project, ProjectTarget)
# Check if we've created a target corresponding to this file.
path = os.path.join(os.getcwd(), file_location, file)
path = os.path.normpath(path)
@@ -192,6 +198,8 @@ class VirtualTargetRegistry:
return [t for t in targets if b2.build.type.is_sybtype(t.type(), type)]
def register_actual_name (self, actual_name, virtual_target):
+ assert isinstance(actual_name, basestring)
+ assert isinstance(virtual_target, VirtualTarget)
if self.actual_.has_key (actual_name):
cs1 = self.actual_ [actual_name].creating_subvariant ()
cs2 = virtual_target.creating_subvariant ()
@@ -238,6 +246,9 @@ class VirtualTargetRegistry:
""" Appends the suffix appropriate to 'type/property_set' combination
to the specified name and returns the result.
"""
+ assert isinstance(specified_name, basestring)
+ assert isinstance(file_type, basestring)
+ assert isinstance(prop_set, property_set.PropertySet)
suffix = b2.build.type.generated_target_suffix (file_type, prop_set)
if suffix:
@@ -254,6 +265,10 @@ class VirtualTarget:
project: project to which this target belongs.
"""
def __init__ (self, name, project):
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(name, basestring)
+ assert isinstance(project, ProjectTarget)
self.name_ = name
self.project_ = project
self.dependencies_ = []
@@ -302,6 +317,9 @@ class VirtualTarget:
If scanner is not specified, then actual target is returned.
"""
+ if __debug__:
+ from .scanner import Scanner
+ assert scanner is None or isinstance(scanner, Scanner)
actual_name = self.actualize_no_scanner ()
if self.always_:
@@ -373,6 +391,9 @@ class AbstractFileTarget (VirtualTarget):
type: optional type of this target.
"""
def __init__ (self, name, type, project, action = None, exact=False):
+ assert isinstance(type, basestring) or type is None
+ assert action is None or isinstance(action, Action)
+ assert isinstance(exact, (int, bool))
VirtualTarget.__init__ (self, name, project)
self.type_ = type
@@ -402,6 +423,7 @@ class AbstractFileTarget (VirtualTarget):
""" Sets the path. When generating target name, it will override any path
computation from properties.
"""
+ assert isinstance(path, basestring)
self.path_ = os.path.normpath(path)
def action (self):
@@ -413,6 +435,7 @@ class AbstractFileTarget (VirtualTarget):
""" Sets/gets the 'root' flag. Target is root is it directly correspods to some
variant of a main target.
"""
+ assert isinstance(set, (int, bool, type(None)))
if set:
self.root_ = True
return self.root_
@@ -425,6 +448,7 @@ class AbstractFileTarget (VirtualTarget):
s: If specified, specified the value to set,
which should be instance of 'subvariant' class.
"""
+ assert s is None or isinstance(s, Subvariant)
if s and not self.creating_subvariant ():
if self.creating_subvariant ():
raise BaseException ("Attempt to change 'dg'")
@@ -435,6 +459,7 @@ class AbstractFileTarget (VirtualTarget):
return self.creating_subvariant_
def actualize_action (self, target):
+ assert isinstance(target, basestring)
if self.action_:
self.action_.actualize ()
@@ -513,7 +538,7 @@ class AbstractFileTarget (VirtualTarget):
If not property is specified, or the rule specified by
returns nothing, returns the result of calling
virtual-target.add-suffix"""
-
+ assert isinstance(specified_name, basestring)
if self.action_:
ps = self.action_.properties()
else:
@@ -627,6 +652,9 @@ class FileTarget (AbstractFileTarget):
- the suffix which correspond to the target's type.
"""
def __init__ (self, name, type, project, action = None, path=None, exact=False):
+ assert isinstance(type, basestring) or type is None
+ assert action is None or isinstance(action, Action)
+ assert isinstance(exact, (int, bool))
AbstractFileTarget.__init__ (self, name, type, project, action, exact)
self.path_ = path
@@ -638,10 +666,12 @@ class FileTarget (AbstractFileTarget):
return self.name_
def clone_with_different_type(self, new_type):
+ assert isinstance(new_type, basestring)
return FileTarget(self.name_, new_type, self.project_,
self.action_, self.path_, exact=True)
def actualize_location (self, target):
+ assert isinstance(target, basestring)
engine = self.project_.manager_.engine ()
if self.action_:
@@ -714,6 +744,7 @@ class FileTarget (AbstractFileTarget):
class NotFileTarget(AbstractFileTarget):
def __init__(self, name, project, action):
+ assert isinstance(action, Action)
AbstractFileTarget.__init__(self, name, None, project, action)
def path(self):
@@ -721,6 +752,7 @@ class NotFileTarget(AbstractFileTarget):
return None
def actualize_location(self, target):
+ assert isinstance(target, basestring)
bjam.call("NOTFILE", target)
bjam.call("ALWAYS", target)
bjam.call("NOUPDATE", target)
@@ -735,8 +767,9 @@ class Action:
not establish dependency relationship, but should do everything else.
"""
def __init__ (self, manager, sources, action_name, prop_set):
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(action_name, basestring) or action_name is None
assert(isinstance(prop_set, property_set.PropertySet))
- assert type(sources) == types.ListType
self.sources_ = sources
self.action_name_ = action_name
if not prop_set:
@@ -758,11 +791,14 @@ class Action:
def add_targets (self, targets):
+ assert is_iterable_typed(targets, VirtualTarget)
self.targets_ += targets
- def replace_targets (old_targets, new_targets):
- self.targets_ = [t for t in targets if not t in old_targets] + new_targets
+ def replace_targets(self, old_targets, new_targets):
+ assert is_iterable_typed(old_targets, VirtualTarget)
+ assert is_iterable_typed(new_targets, VirtualTarget)
+ self.targets_ = [t for t in self.targets_ if not t in old_targets] + new_targets
def targets (self):
return self.targets_
@@ -826,6 +862,8 @@ class Action:
For each passed source, actualizes it with the appropriate scanner.
Returns the actualized virtual targets.
"""
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
result = []
for i in sources:
scanner = None
@@ -852,6 +890,8 @@ class Action:
New values will be *appended* to the variables. They may be non-empty,
if caller wants it.
"""
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
dependencies = self.properties_.get ('')
self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set)
@@ -879,6 +919,7 @@ class Action:
to get generated headers correctly. Default implementation returns
its argument.
"""
+ assert isinstance(prop_set, property_set.PropertySet)
return prop_set
@@ -889,6 +930,7 @@ class NullAction (Action):
actions which create them.
"""
def __init__ (self, manager, prop_set):
+ assert isinstance(prop_set, property_set.PropertySet)
Action.__init__ (self, manager, [], None, prop_set)
def actualize (self):
@@ -908,7 +950,8 @@ class NonScanningAction(Action):
Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set)
def actualize_source_type(self, sources, property_set):
-
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(property_set, property_set.PropertySet)
result = []
for s in sources:
result.append(s.actualize())
@@ -920,6 +963,9 @@ def traverse (target, include_roots = False, include_sources = False):
found during traversal, it's either included or not, dependencing of the
value of 'include_roots'. In either case, sources of root are not traversed.
"""
+ assert isinstance(target, VirtualTarget)
+ assert isinstance(include_roots, (int, bool))
+ assert isinstance(include_sources, (int, bool))
result = []
if target.action ():
@@ -951,7 +997,12 @@ def clone_action (action, new_project, new_action_name, new_properties):
and all produced target. The rule-name and properties are set
to 'new-rule-name' and 'new-properties', if those are specified.
Returns the cloned action."""
-
+ if __debug__:
+ from .targets import ProjectTarget
+ assert isinstance(action, Action)
+ assert isinstance(new_project, ProjectTarget)
+ assert isinstance(new_action_name, basestring)
+ assert isinstance(new_properties, property_set.PropertySet)
if not new_action_name:
new_action_name = action.action_name()
@@ -990,6 +1041,14 @@ class Subvariant:
sources_usage_requirements: Properties propagated from sources
created_targets: Top-level created targets
"""
+ if __debug__:
+ from .targets import AbstractTarget
+ assert isinstance(main_target, AbstractTarget)
+ assert isinstance(prop_set, property_set.PropertySet)
+ assert is_iterable_typed(sources, VirtualTarget)
+ assert isinstance(build_properties, property_set.PropertySet)
+ assert isinstance(sources_usage_requirements, property_set.PropertySet)
+ assert is_iterable_typed(created_targets, VirtualTarget)
self.main_target_ = main_target
self.properties_ = prop_set
self.sources_ = sources
@@ -1028,6 +1087,7 @@ class Subvariant:
return self.sources_usage_requirements_
def set_usage_requirements (self, usage_requirements):
+ assert isinstance(usage_requirements, property_set.PropertySet)
self.usage_requirements_ = usage_requirements
def usage_requirements (self):
@@ -1038,7 +1098,9 @@ class Subvariant:
either directly or indirectly, and either as sources,
or as dependency properties. Targets referred with
dependency property are returned a properties, not targets."""
-
+ if __debug__:
+ from .targets import GenerateResult
+ assert isinstance(result, GenerateResult)
# Find directly referenced targets.
deps = self.build_properties().dependency()
all_targets = self.sources_ + deps
@@ -1071,7 +1133,8 @@ class Subvariant:
if 'target_type' is not specified), the result will contain
<$(feature)>path-to-that-target.
"""
-
+ assert isinstance(feature, basestring)
+ assert isinstance(target_type, basestring)
if not target_type:
key = feature
else:
@@ -1088,6 +1151,7 @@ class Subvariant:
return result
def all_target_directories(self, target_type = None):
+ assert isinstance(target_type, (basestring, type(None)))
# TODO: does not appear to use target_type in deciding
# if we've computed this already.
if not self.target_directories_:
@@ -1095,6 +1159,7 @@ class Subvariant:
return self.target_directories_
def compute_target_directories(self, target_type=None):
+ assert isinstance(target_type, (basestring, type(None)))
result = []
for t in self.created_targets():
if not target_type or b2.build.type.is_derived(t.type(), target_type):
diff --git a/src/build_system.py b/src/build_system.py
index 6bd05d1d9..b5a3b2775 100644
--- a/src/build_system.py
+++ b/src/build_system.py
@@ -509,15 +509,6 @@ def main_real():
# that all project files already be loaded.
(target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties)
- # Expand properties specified on the command line into multiple property
- # sets consisting of all legal property combinations. Each expanded property
- # set will be used for a single build run. E.g. if multiple toolsets are
- # specified then requested targets will be built with each of them.
- if properties:
- expanded = build_request.expand_no_defaults(properties)
- else:
- expanded = [property_set.empty()]
-
# Check that we actually found something to build.
if not current_project and not target_ids:
get_manager().errors()("no Jamfile in current directory found, and no target references specified.")
@@ -595,6 +586,22 @@ def main_real():
global results_of_main_targets
+ # Expand properties specified on the command line into multiple property
+ # sets consisting of all legal property combinations. Each expanded property
+ # set will be used for a single build run. E.g. if multiple toolsets are
+ # specified then requested targets will be built with each of them.
+ # The expansion is being performed as late as possible so that the feature
+ # validation is performed after all necessary modules (including project targets
+ # on the command line) have been loaded.
+ if properties:
+ expanded = []
+ for p in properties:
+ expanded.extend(build_request.convert_command_line_element(p))
+
+ expanded = build_request.expand_no_defaults(expanded)
+ else:
+ expanded = [property_set.empty()]
+
# Now that we have a set of targets to build and a set of property sets to
# build the targets with, we can start the main build process by using each
# property set to generate virtual targets from all of our listed targets
diff --git a/src/contrib/boost.jam b/src/contrib/boost.jam
index 7daefd0c7..c3caa3a3b 100644
--- a/src/contrib/boost.jam
+++ b/src/contrib/boost.jam
@@ -207,33 +207,37 @@ rule boost_std ( inc ? lib ? )
alias headers ;
boost_lib_std chrono : BOOST_CHRONO_DYN_LINK ;
+ boost_lib_std container : BOOST_CONTAINER_DYN_LINK ;
boost_lib_std date_time : BOOST_DATE_TIME_DYN_LINK ;
boost_lib_std filesystem : BOOST_FILE_SYSTEM_DYN_LINK ;
boost_lib_std graph : BOOST_GRAPH_DYN_LINK ;
boost_lib_std graph_parallel : BOOST_GRAPH_DYN_LINK ;
boost_lib_std iostreams : BOOST_IOSTREAMS_DYN_LINK ;
boost_lib_std locale : BOOST_LOCALE_DYN_LINK ;
- boost_lib_std math_tr1 : BOOST_MATH_TR1_DYN_LINK ;
- boost_lib_std math_tr1f : BOOST_MATH_TR1_DYN_LINK ;
- boost_lib_std math_tr1l : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std log : BOOST_LOG_DYN_LINK ;
+ boost_lib_std log_setup : BOOST_LOG_SETUP_DYN_LINK ;
boost_lib_std math_c99 : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std math_c99f : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std math_c99l : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_tr1 : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_tr1f : BOOST_MATH_TR1_DYN_LINK ;
+ boost_lib_std math_tr1l : BOOST_MATH_TR1_DYN_LINK ;
boost_lib_std mpi : BOOST_MPI_DYN_LINK ;
+ boost_lib_std prg_exec_monitor : BOOST_TEST_DYN_LINK ;
boost_lib_std program_options : BOOST_PROGRAM_OPTIONS_DYN_LINK ;
boost_lib_std python : BOOST_PYTHON_DYN_LINK ;
boost_lib_std python3 : BOOST_PYTHON_DYN_LINK ;
boost_lib_std random : BOOST_RANDOM_DYN_LINK ;
boost_lib_std regex : BOOST_REGEX_DYN_LINK ;
boost_lib_std serialization : BOOST_SERIALIZATION_DYN_LINK ;
- boost_lib_std wserialization : BOOST_SERIALIZATION_DYN_LINK ;
boost_lib_std signals : BOOST_SIGNALS_DYN_LINK ;
boost_lib_std system : BOOST_SYSTEM_DYN_LINK ;
- boost_lib_std unit_test_framework : BOOST_TEST_DYN_LINK ;
- boost_lib_std prg_exec_monitor : BOOST_TEST_DYN_LINK ;
boost_lib_std test_exec_monitor : BOOST_TEST_DYN_LINK ;
boost_lib_std thread : BOOST_THREAD_DYN_DLL ;
+ boost_lib_std timer : BOOST_TIMER_DYN_DLL ;
+ boost_lib_std unit_test_framework : BOOST_TEST_DYN_LINK ;
boost_lib_std wave : BOOST_WAVE_DYN_LINK ;
+ boost_lib_std wserialization : BOOST_SERIALIZATION_DYN_LINK ;
}
# Example placeholder for rules defining Boost library project & library targets
diff --git a/src/contrib/boost.py b/src/contrib/boost.py
index e256fe965..7d1f6b4d9 100644
--- a/src/contrib/boost.py
+++ b/src/contrib/boost.py
@@ -4,7 +4,7 @@
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Boost library support module.
-#
+#
# This module allows to use the boost library from boost-build projects.
# The location of a boost source tree or the path to a pre-built
# version of the library can be configured from either site-config.jam
@@ -13,15 +13,15 @@
# tree. As a last resort it tries to use pre-built libraries from the standard
# search path of the compiler.
#
-# If the location to a source tree is known, the module can be configured
+# If the location to a source tree is known, the module can be configured
# from the *-config.jam files:
#
# using boost : 1.35 : /path-to-boost-root ;
#
# If the location to a pre-built version is known:
#
-# using boost : 1.34
-# : /usr/local/include/boost_1_34
+# using boost : 1.34
+# : /usr/local/include/boost_1_34
# /usr/local/lib
# ;
#
@@ -41,7 +41,7 @@
#
# boost.use-project ;
#
-# The library can be referenced with the project identifier '/boost'. To
+# The library can be referenced with the project identifier '/boost'. To
# reference the program_options you would specify:
#
# exe myexe : mysrc.cpp : /boost//program_options ;
@@ -76,7 +76,7 @@ __debug = None
def debug():
global __debug
if __debug is None:
- __debug = "--debug-configuration" in bjam.variable("ARGV")
+ __debug = "--debug-configuration" in bjam.variable("ARGV")
return __debug
@@ -94,9 +94,9 @@ def debug():
# /path-to-include: The include directory to search.
#
# /path-to-library: The library directory to search.
-#
+#
# system or versioned.
-#
+#
# my_build_id: The custom build id to use.
#
def init(version, options = None):
@@ -130,7 +130,7 @@ rules = projects.project_rules()
# of the boost library. If the 'version' parameter is omitted either
# the configured default (first in config files) is used or an auto
# configuration will be attempted.
-#
+#
@bjam_signature(([ "version", "?" ], ))
def use_project(version = None):
projects.push_current( projects.current() )
@@ -149,7 +149,7 @@ def use_project(version = None):
root = opts.get('' )
inc = opts.get('')
lib = opts.get('')
-
+
if debug():
print "notice: using boost library {} {}".format( version, opt.raw() )
@@ -171,7 +171,7 @@ def use_project(version = None):
root = bjam.variable("BOOST_ROOT")
module = projects.current().project_module()
-
+
if root:
bjam.call('call-in-module', module, 'use-project', ['boost', root])
else:
@@ -199,14 +199,15 @@ def boost_std(inc = None, lib = None):
tag_prop_set = property_set.create([property.Property('', tag_std)])
attributes = projects.attributes(projects.current().project_module())
attributes.requirements = attributes.requirements.refine(tag_prop_set)
-
+
alias('headers')
-
+
def boost_lib(lib_name, dyn_link_macro):
if (isinstance(lib_name,str)):
lib_name = [lib_name]
builtin.lib(lib_name, usage_requirements=['shared:{}'.format(dyn_link_macro)])
-
+
+ boost_lib('container' , 'BOOST_CONTAINER_DYN_LINK' )
boost_lib('date_time' , 'BOOST_DATE_TIME_DYN_LINK' )
boost_lib('filesystem' , 'BOOST_FILE_SYSTEM_DYN_LINK' )
boost_lib('graph' , 'BOOST_GRAPH_DYN_LINK' )
@@ -267,7 +268,7 @@ def tag_std(name, type, prop_set):
def tag_maybe(param):
return ['-{}'.format(param)] if param else []
-
+
def tag_system(name, type, prop_set):
return common.format_name([''] + tag_maybe(__build_id), name, type, prop_set)
diff --git a/src/contrib/modular.jam b/src/contrib/modular.jam
index 917dfbaa5..cba517048 100644
--- a/src/contrib/modular.jam
+++ b/src/contrib/modular.jam
@@ -3,108 +3,11 @@
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
-#alias library
-# :
-# : : : include
-# ;
-
import path ;
import project ;
import modules ;
import regex ;
-
-rule find ( target-refs + )
-{
- process-args ;
-
- local caller-mod = [ CALLER_MODULE ] ;
- local caller-dir = [ modules.peek $(caller-mod) : __file__ ] ;
- caller-dir = $(caller-dir:D) ;
- caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ;
-
- for local target-ref in $(target-refs)
- {
- local ref = [ MATCH ^(.*)//.* : $(target-ref:G=) ] ;
- local search-prefix ;
- local search-sub ;
- for local prefix in $(.search-path-prefix)
- {
- if ! $(search-prefix)
- {
- local search-match = [ MATCH ^($(prefix))/(.*)$ : $(ref) ] ;
- search-prefix = $(search-match[1]) ;
- search-sub = $(search-match[2]) ;
- }
- }
- local found = [ path.glob $(.search-path.$(search-prefix)) : $(search-sub) ] ;
- found = $(found[1]) ;
- if $(found)
- {
- local lib-ref = [ regex.split $(search-sub) / ] ;
- lib-ref = $(search-prefix)/$(lib-ref[1]) ;
- local lib-path = [ path.relative-to $(caller-dir) $(found) ] ;
- library $(lib-ref) $(caller-mod) : $(lib-path) ;
- }
- }
-
- return $(target-refs) ;
-}
-
-rule library ( name caller-module ? : root )
-{
- process-args ;
-
- # Dir path of caller to base paths from.
- caller-module ?= [ CALLER_MODULE ] ;
- local caller-dir = [ modules.peek $(caller-module) : __file__ ] ;
- caller-dir = $(caller-dir:D) ;
-
- # Find the various parts of the library.
- local lib-dir = [ path.root [ path.root $(root) $(caller-dir) ] [ path.pwd ] ] ;
- local lib-contents = [ path.glob $(lib-dir) : "include" "build" ] ;
- lib-contents = $(lib-contents:D=) ;
- # "include" dir for library..
- local include-dir ;
- if "include" in $(lib-contents)
- {
- include-dir = include ;
- }
-
- # Does it look like a library?
- if $(include-dir)
- {
- # Load/create/declare library project.
- local lib-module = [ project.find $(root) : $(caller-dir) ] ;
- if ! $(lib-module)
- {
- lib-module = [ project.load
- [ path.root [ path.make $(root) ] $(caller-dir) ] : synthesize ] ;
- }
- local lib-target = [ project.target $(lib-module) ] ;
-
- # We move to the library project module to define the various
- # targets others use for the library.
- if ! [ modules.peek $(lib-module) : __library__ ]
- {
- modules.poke $(lib-module) : __library__ : $(name) ;
- project.push-current $(lib-target) ;
-
- # Declare the library alias.
- modules.call-in $(lib-module) : alias library
- : # Sources
- : # Requirements
- : # Default Build
- : # Usage Requirements
- $(include-dir)
- ;
-
- project.pop-current ;
- }
-
- # Declare project alternate ID.
- modules.call-in $(caller-module) : use-project $(name) : $(root) ;
- }
-}
+import type ;
# Add a location, i.e. directory, where to search for libraries.
# The optional 'prefix' indicates which rooted-prefixes the new
@@ -126,6 +29,167 @@ rule add-location ( dir prefix ? : base-dir ? )
.search-path.$(prefix) += [ path.root [ path.root $(dir) $(base-dir) ] [ path.pwd ] ] ;
}
+# Declares additional definitions of a modular library target external
+# to the modular library build itself. This makes it possible to externally
+# define modular libraries without modifying the library. The passed in
+# values are added on demand when the named library is first declared.
+rule external (
+ name : sources * : requirements * : default-build * :
+ usage-requirements * )
+{
+ .external.($(name)).sources = $(sources) ;
+ .external.($(name)).requirements = $(requirements) ;
+ .external.($(name)).default-build = $(default-build) ;
+ .external.($(name)).usage-requirements = $(usage-requirements) ;
+}
+
+# Find, and declare, any modular libraries referenced in the target-refs.
+# This will both load the modular libraries, and declare/manufacture
+# the modular libraries as needed.
+rule find ( target-refs + )
+{
+ process-args ;
+
+ local caller-mod = [ CALLER_MODULE ] ;
+ local caller-dir = [ modules.peek $(caller-mod) : __file__ ] ;
+ caller-dir = $(caller-dir:D) ;
+ caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ;
+
+ local result-refs ;
+ for local target-ref in $(target-refs)
+ {
+ result-refs += [ resolve-reference $(target-ref)
+ : $(caller-mod) $(caller-dir) ] ;
+ }
+
+ return $(result-refs) ;
+}
+
+##############################################################################
+
+local rule resolve-reference ( target-ref : caller-mod caller-dir ? )
+{
+ # ECHO %%% modular.resolve-target-ref $(target-ref) :: $(caller-mod) $(caller-dir) ;
+ if ! $(caller-dir)
+ {
+ caller-dir = [ modules.peek $(caller-mod) : __file__ ] ;
+ caller-dir = $(caller-dir:D) ;
+ caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ;
+ }
+ local result-ref = $(target-ref) ;
+ local ref = [ MATCH ^(.*)//.* : $(target-ref:G=) ] ;
+ # if ! ( $(ref) in $(.target-refs) )
+ {
+ # .target-refs += $(ref) ;
+ local search-prefix ;
+ local search-sub ;
+ for local prefix in $(.search-path-prefix)
+ {
+ if ! $(search-prefix)
+ {
+ local search-match = [ MATCH ^($(prefix))/(.*)$ : $(ref) ] ;
+ search-prefix = $(search-match[1]) ;
+ search-sub = $(search-match[2]) ;
+ }
+ }
+
+ if $(search-prefix)
+ {
+ local found = [ path.glob $(.search-path.$(search-prefix)) : $(search-sub) ] ;
+ found = $(found[1]) ;
+ if $(found)
+ {
+ local lib-ref = [ regex.split $(search-sub) / ] ;
+ lib-ref = $(search-prefix)/$(lib-ref[1]) ;
+ local lib-path = [ path.relative-to $(caller-dir) $(found) ] ;
+ define-library $(lib-ref) $(caller-mod) : $(lib-path) ;
+ }
+ }
+ }
+ return $(result-ref) ;
+}
+
+local rule define-library ( name caller-module ? : root )
+{
+ # ECHO ~~~ modular.library $(name) $(caller-module) :: $(root) :: $(depends) ;
+
+ process-args ;
+
+ # Dir path of caller to base paths from.
+ caller-module ?= [ CALLER_MODULE ] ;
+ local caller-dir = [ modules.peek $(caller-module) : __file__ ] ;
+ caller-dir = $(caller-dir:D) ;
+
+ # Find the various parts of the library.
+ local lib-dir = [ path.root [ path.root $(root) $(caller-dir) ] [ path.pwd ] ] ;
+ local lib-contents = [ path.glob $(lib-dir) : "include" "build" ] ;
+ lib-contents = $(lib-contents:D=) ;
+
+ # "include" dir for library..
+ local include-dir ;
+ if "include" in $(lib-contents)
+ {
+ include-dir = $(root)/include ;
+ }
+
+ # If it has a build dir, i.e. it has targets to build,
+ # we root the project at the build dir to make it easy
+ # to refer to the build targets. This mirrors the regular
+ # Boost organization of the project aliases.
+ if "build" in $(lib-contents)
+ {
+ root = $(root)/build ;
+ build-dir = "." ;
+ }
+
+ # Shadow target declarations so that we can alter build targets
+ # to work in the standalone modular structure.
+ local lib-location = [ path.root [ path.make $(root) ] $(caller-dir) ] ;
+ local lib-module-name = [ project.module-name $(lib-location) ] ;
+ local modular-rules = [ RULENAMES modular-rules ] ;
+ IMPORT modular-rules : $(modular-rules) : $(lib-module-name) : $(modular-rules) ;
+
+ # Load/create/declare library project.
+ local lib-module = [ project.find $(root) : $(caller-dir) ] ;
+ if ! $(lib-module)
+ {
+ # If the find was unable to load the project we synthesize it.
+ lib-module = [ project.load $(lib-location) : synthesize ] ;
+ }
+ local lib-target = [ project.target $(lib-module) ] ;
+ if ! [ modules.peek $(lib-module) : __library__ ]
+ {
+ modules.poke $(lib-module) : __library__ : $(name) ;
+ for local type in [ modules.peek type : .types ]
+ {
+ main-rule-name = [ type.type-to-rule-name $(type) ] ;
+ IMPORT modular-rules : main-target-rule : $(lib-module-name) : $(main-rule-name) ;
+ }
+ }
+
+ # Declare project alternate ID.
+ modules.call-in $(caller-module) : use-project $(name) : $(root) ;
+
+ # Create a "library" target that has basic usage info if needed.
+ if ! [ $(lib-target).has-alternative-for-target library ]
+ {
+ include-dir = [ path.relative-to $(root) $(include-dir) ] ;
+
+ project.push-current $(lib-target) ;
+
+ # Declare the library alias.
+ modules.call-in $(lib-module) : library
+ : # Sources
+ : # Requirements
+ : # Default Build
+ : # Usage Requirements
+ $(include-dir)
+ ;
+
+ project.pop-current ;
+ }
+}
+
local rule process-args ( )
{
if ! $(.did-process-args)
@@ -139,3 +203,86 @@ local rule process-args ( )
}
}
}
+
+rule apply-external (
+ mod : field : values * )
+{
+ local result ;
+ local name = [ modules.peek $(mod) : __library__ ] ;
+ values += $(.external.($(name)).$(field)) ;
+ for local value in $(values)
+ {
+ result += [ resolve-reference $(value) : $(mod) ] ;
+ }
+ return $(result) ;
+}
+
+module modular-rules
+{
+ import type ;
+ import targets ;
+ import builtin ;
+ import alias ;
+
+ # Avoids any form of installation for Boost modules.
+ rule boost-install ( libraries * ) { }
+
+ # Generic typed target rule to pre-process main target
+ # declarations to make them work within the standalone
+ # modular structure.
+ rule main-target-rule (
+ name : sources * : requirements * : default-build * :
+ usage-requirements * )
+ {
+ local mod = [ CALLER_MODULE ] ;
+
+ # ECHO @@@ [[$(mod)]] modular-rules.main-target-rule $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ;
+
+ # First discover the required target type based on the exact alias used to
+ # invoke this rule.
+ local bt = [ BACKTRACE 1 ] ;
+ local rulename = $(bt[4]) ;
+ local target-type = [ type.type-from-rule-name $(rulename) ] ;
+ return [ targets.create-typed-target $(target-type) : [ project.current ] :
+ $(name) : $(sources) : $(requirements) : $(default-build) :
+ $(usage-requirements) ] ;
+ }
+
+ rule lib ( names + : sources * : requirements * : default-build * :
+ usage-requirements * )
+ {
+ local mod = [ CALLER_MODULE ] ;
+ requirements +=