diff --git a/.travis.yml b/.travis.yml index e97c65567..5cc0ca33c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,15 @@ -language: c -compiler: - - gcc -script: ./bootstrap.sh +sudo: false +os: + - linux +env: + - TOOLSET=gcc TEST_ALL_EXTRAS= +# - TOOLSET=gcc TEST_ALL_EXTRAS=--extras + - TOOLSET=clang TEST_ALL_EXTRAS= +# - TOOLSET=clang TEST_ALL_EXTRAS=--extras +language: python +python: + - 2.7 + - 2.6 +script: + - ./bootstrap.sh --with-toolset=${TOOLSET} + - cd test && python test_all.py ${TOOLSET} ${TEST_ALL_EXTRAS} diff --git a/Jamroot.jam b/Jamroot.jam index 54e9cc839..2f50eed09 100644 --- a/Jamroot.jam +++ b/Jamroot.jam @@ -6,12 +6,11 @@ import package ; import os ; local ext = "" ; -if [ os.on-windows ] +if [ os.on-windows ] || [ os.on-vms ] { ext = ".exe" ; } - package.install boost-build-engine boost-build : # properties : # binaries diff --git a/bootstrap.sh b/bootstrap.sh index 8523c3db3..c99242ee8 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -57,7 +57,7 @@ my_dir="." if test "x$TOOLSET" = x; then guessed_toolset=`$my_dir/src/engine/build.sh --guess-toolset` case $guessed_toolset in - acc | darwin | gcc | como | mipspro | pathscale | pgi | qcc | vacpp ) + acc | darwin | gcc | como | mipspro | pathscale | pgi | qcc | vacpp | xlcpp ) TOOLSET=$guessed_toolset ;; diff --git a/bootstrap_vms.com b/bootstrap_vms.com new file mode 100644 index 000000000..3d8afaab9 --- /dev/null +++ b/bootstrap_vms.com @@ -0,0 +1,48 @@ +$! Copyright 2015 Artur Shepilko. +$! +$! Distributed under the Boost Software License, Version 1.0. +$! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +$! +$ THIS_FACILITY = "BOOSTBUILD" +$ +$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'") +$ save_verify = f$verify(verify) +$ save_default = f$env("DEFAULT") +$ +$ SAY := WRITE SYS$OUTPUT +$ +$ ON WARNING THEN CONTINUE +$ ON ERROR THEN GOTO ERROR +$ +$ SAY "I|Bootstrapping the build engine..." +$ +$ set def [.src.engine] +$ @build_vms /out=[--]bootstrap.log +$ +$ set def 'save_default' +$ +$ if f$search("[.src.engine.bin_vms]b2.exe") .eqs. "" then goto ERROR +$ copy [.src.engine.bin_vms]b2.exe [] +$ copy [.src.engine.bin_vms]bjam.exe [] +$ +$ SAY "I|Bootstrapping is done, B2.EXE created." +$ type sys$input +$DECK + + To build and install under ROOT: directory, run: + MC []B2 --prefix="/root" install + + Set B2 command: + B2 :== $ROOT:[BIN]B2.EXE + +$EOD +$ sts = 1 +$ +$EXIT: +$ set def 'save_default' +$ exit 'sts' + (0 * f$verify(save_verify)) + +$ERROR: +$ SAY "E|Failed to bootstrap build engine, see BOOTSTRAP.LOG for details." +$ sts = 4 +$ goto EXIT diff --git a/doc/bjam.qbk b/doc/bjam.qbk index a57a44021..6b754d13d 100644 --- a/doc/bjam.qbk +++ b/doc/bjam.qbk @@ -71,7 +71,7 @@ cd /jam source location/ sh ./build.sh ] -For the Boost.Jam source included with the Boost distribution the /jam source location/ is =BOOST_ROOT/tools/build/v2/engine=. +For the Boost.Jam source included with the Boost distribution the /jam source location/ is =BOOST_ROOT/tools/build/src/engine=. If the scripts fail to detect an appropriate toolset to build with your particular toolset may not be auto-detectable. In that case, you can specify the toolset as the first argument, this assumes that the toolset is readily available in the =PATH=. @@ -421,7 +421,7 @@ This facility is useful for correct header file scanning, since many compilers w The basic =b2= language entity is called a rule. A rule is defined in two parts: the procedure and the actions. 
The procedure is a body of jam statements to be run when the rule is invoked; the actions are the OS shell commands to execute when updating the built targets of the rule. -Rules can return values, which can be expanded into a list with "[ /rule/ /args/ ... ]". A rule's value is the value of its last statement, though only the following statements have values: 'if' (value of the leg chosen), 'switch' (value of the case chosen), set (value of the resulting variable), and 'return' (value of its arguments). Note that 'return' doesn't actually cause a return, i.e., is a no-op unless it is the last statement of the last block executed within rule body. +Rules can return values, which can be expanded into a list with "[ /rule/ /args/ ... ]". A rule's value is the value of its last statement, though only the following statements have values: 'if' (value of the leg chosen), 'switch' (value of the case chosen), set (value of the resulting variable), and 'return' (value of its arguments). The =b2= statements for defining and invoking rules are as follows: @@ -727,7 +727,23 @@ rule GLOB ( /directories/ * : /patterns/ * : /downcase-opt/ ? ) Using the same wildcards as for the patterns in the switch statement. It is invoked by being used as an argument to a rule invocation inside of "=[ ]=". For example: "[^FILES = \[ GLOB dir1 dir2 : *.c *.h \]]" sets =FILES= to the list of C source and header files in =dir1= and =dir2=. The resulting filenames are the full pathnames, including the directory, but the pattern is applied only to the file name without the directory. -If /downcase-opt/ is supplied, filenames are converted to all-lowercase before matching against the pattern; you can use this to do case-insensitive matching using lowercase patterns. The paths returned will still have mixed case if the OS supplies them. On Windows NT and Cygwin, filenames are always downcased before matching. +If /downcase-opt/ is supplied, filenames are converted to all-lowercase before matching against the pattern; you can use this to do case-insensitive matching using lowercase patterns. The paths returned will still have mixed case if the OS supplies them. On Windows NT, Cygwin, and OpenVMS, filenames are always downcased before matching. + +[endsect] + +[section =GLOB_ARCHIVE= ] + +The =GLOB_ARCHIVE= rule does name globbing of object archive members. + +[pre +rule GLOB_ARCHIVE ( /archives/ * : /member-patterns/ * : /downcase-opt/ ? : /symbol-patterns/ ? ) +] + +Similarly to =GLOB=, this rule is used to match names of member files in an archive (static object library). The list of successfully matched members is returned, or null otherwise. The resulting member names are qualified with the pathname of the containing archive in the form =archive-path(member-name)=. Member patterns match the member name only; when no wildcards are specified, an exact match is assumed. Member names generally correspond to object file names and as such are platform-specific -- use of the platform-defined object suffix in the matching patterns can allow for portability. + +If /downcase-opt/ is supplied, the member names are converted to all-lowercase before matching against the pattern; you can use this to do case-insensitive matching using lowercase patterns. The paths returned will still have mixed case if the OS supplies them. On Windows NT, Cygwin, and OpenVMS, filenames are always downcased before matching. + +Additionally, members can be matched with symbol/function patterns on supported platforms (currently, OpenVMS only).
In this case, members containing the matching symbols are returned. Member and symbol patterns are applied as OR conditions, with member patterns taking precedence. On unsupported platforms, null is returned when any symbol patterns are specified. [endsect] @@ -841,6 +857,7 @@ rule SHELL ( /command/ : * ) [variablelist [[=exit-status=] [In addition to the output the result status of the executed command is returned as a second element of the result.]] [[=no-output=] [Don't capture the output of the command. Instead an empty ("") string value is returned in place of the output.]] + [[=strip-eol=] [Remove trailing end-of-line character from output, if any.]] ] Because the Perforce/Jambase defines a =SHELL= rule which hides the @@ -1000,7 +1017,7 @@ Creates new /vars/ inside to the enclosing ={}= block, obscuring any previous va return /values/ ; ] -Within a rule body, the return statement sets the return value for an invocation of the rule. It does *not* cause the rule to return; a rule's value is actually the value of the last statement executed, so a return should be the last statement executed before the rule "naturally" returns. +Within a rule body, the return statement sets the return value for an invocation of the rule and returns to the caller. [pre switch /value/ @@ -1038,6 +1055,18 @@ while /cond/ { /statements/ } Repeatedly execute /statements/ while /cond/ remains true upon entry. (See the description of /cond/ expression syntax under if, above). +[pre +break ; +] + +Immediately exits the nearest enclosing while or for loop. + +[pre +continue ; +] + +Jumps to the top of the nearest enclosing while or for loop. + [endsect] [section Variables] @@ -1149,12 +1178,19 @@ prints [^"C:/Program Files/Borland"] it can be important to pass them true windows-style paths. The =:W= modifier, *under Cygwin only*, turns a cygwin path into a Win32 path using the [@http://www.cygwin.com/cygwin-api/func-cygwin-conv-to-win32-path.html - =cygwin_conv_to_win32_path=] function. On other platforms, the string is - unchanged. For example + =cygwin_conv_to_win32_path=] function. For example `` x = "/cygdrive/c/Program Files/Borland" ; ECHO $(x:W) ; `` prints [^"C:\\Program Files\\Borland"] on Cygwin + +Similarly, when used on OpenVMS, the =:W= modifier translates a POSIX-style path into native VMS-style format using =decc$to_vms= CRTL function. This modifier is generally used inside action blocks to properly specify file paths in VMS-specific commands. For example +`` + x = "subdir/filename.c" ; ECHO $(x:W) ; +`` +prints [^"\[.subdir\]filename.c"] on OpenVMS + +On other platforms, the string is unchanged. ]] [[[^:['chars]]] diff --git a/doc/jamfile.jam b/doc/jamfile.jam index 22d67e62a..e61017d65 100644 --- a/doc/jamfile.jam +++ b/doc/jamfile.jam @@ -5,7 +5,7 @@ import quickbook ; using boostbook ; -project tools/build/v2/doc +project tools/build/doc ; boostbook userman : src/standalone.xml diff --git a/doc/src/install.xml b/doc/src/install.xml index 40c8b5ccb..8a272c73d 100644 --- a/doc/src/install.xml +++ b/doc/src/install.xml @@ -45,7 +45,7 @@ If you are not using a Boost.Build package, but rather the version bundled with the Boost C++ Libraries, the above commands should be run - in the tools/build/v2 directory. + in the tools/build directory. Now that Boost.Build is installed, you can try some of the examples. Copy @@ -97,7 +97,7 @@ Boost.Build release package, except for jam_src directory. 
If you're using Boost CVS to obtain Boost.Build, as opposed to release package, take - everything from the tools/build/v2 directory. + everything from the tools/build directory. For a check, make sure that /usr/share/boost-build/boost-build.jam is installed. diff --git a/doc/src/overview.xml b/doc/src/overview.xml index 6dbb38a84..1c2310b96 100644 --- a/doc/src/overview.xml +++ b/doc/src/overview.xml @@ -728,7 +728,7 @@ b2 toolset=gcc variant=debug optimization=space - Do no execute the commands, only print them. + Do not execute the commands, only print them. @@ -775,7 +775,7 @@ b2 toolset=gcc variant=debug optimization=space - Supress all informational messages. + Suppress all informational messages. diff --git a/doc/src/reference.xml b/doc/src/reference.xml index 9ac0bfa4d..e258985ee 100644 --- a/doc/src/reference.xml +++ b/doc/src/reference.xml @@ -126,12 +126,12 @@ rule check-target-builds ( target message ? : true-properties * : false-properti This function can only be used when passing requirements or usage requirements to a metatarget rule. For example, to make an application link - to a library if it's avavailable, one has use the following: + to a library if it's available, one has use the following: exe app : app.cpp : [ check-target-builds has_foo "System has foo" : <library>foo : <define>FOO_MISSING=1 ] ; - For another example, the alias rule can be used to consolidate configuraiton + For another example, the alias rule can be used to consolidate configuration choices and make them available to other metatargets, like so: alias foobar : : : : [ check-target-builds has_foo "System has foo" : <library>foo : <library>bar ] ; @@ -221,7 +221,7 @@ ECHO [ glob-tree *.cpp : .svn ] ; always always building a metatarget - The always funciton takes a single + The always function takes a single parameter—a list of metatarget names. The top-level targets produced by the named metatargets will be always considered out of date. Consider this example: @@ -366,7 +366,7 @@ path-constant DATA : data/a.txt ; static - A feature controling how libraries are built. + A feature controlling how libraries are built. @@ -529,7 +529,7 @@ path-constant DATA : data/a.txt ; Specify an additional directory where the system should look for shared libraries when the executable or shared library is run. This feature only affects Unix - compilers. Plase see + compilers. Please see in for details. @@ -551,7 +551,7 @@ path-constant DATA : data/a.txt ; As the result, the executable can be run without changing system paths to shared libraries or installing the libraries to system paths. This is very - convenient during development. Plase see the FAQ entry for details. Note that on Mac OSX, the paths are unconditionally hardcoded by the linker, and it is not possible to disable that behaviour. @@ -725,8 +725,27 @@ path-constant DATA : data/a.txt ; architecture + + Allowed values: + x86, + ia64, + sparc, + power, + mips1, + mips2, + mips3, + mips4, + mips32, + mips32r2, + mips64, + parisc, + arm, + combined, + combined-x86-power. + + The architecture features specifies - the general processor familty to generate code for. + the general processor family to generate code for. @@ -1082,7 +1101,7 @@ using msvc : &toolset_ops; ; script has been explicitly specified for the current target platform. Used setup script will be passed the target platform identifier (x86, x86_amd64, x86_ia64, amd64 or ia64) as a - arameter. If not specified a default script is chosen based on the + parameter. 
If not specified a default script is chosen based on the used compiler binary, e.g. vcvars32.bat or vsvars32.bat. @@ -1361,7 +1380,7 @@ using como-linux : &toolset_ops; ; originally produced by Metrowerks and presently developed by Freescale. Boost.Build supports only the versions of the compiler that target x86 processors. All such versions were released by Metrowerks - before aquisition and are not sold any longer. The last version known + before acquisition and are not sold any longer. The last version known to work is 9.4. The module is initialized using the following syntax: @@ -1491,7 +1510,7 @@ using sun : &toolset_ops; ; When using this compiler on complex C++ code, such as the Boost C++ library, it is - recommended to specify the following options when intializing the + recommended to specify the following options when initializing the sun module: -library=stlport4 -features=tmplife -features=tmplrefstatic @@ -1547,7 +1566,7 @@ using vacpp ; The STLport library is an alternative implementation of C++ runtime library. Boost.Build - supports using that library on Windows platfrom. Linux is + supports using that library on Windows platform. Linux is hampered by different naming of libraries in each STLport version and is not officially supported. @@ -2213,8 +2232,8 @@ import path : native make : native-path make-path ; If there's one viable alternative, it's choosen. Otherwise, an attempt is made to find one best alternative. An alternative - a is better than another alternative b, iff the set of properties - in b's condition is a strict subset of the set of properities of + a is better than another alternative b, if the set of properties + in b's condition is a strict subset of the set of properties of 'a's condition. If there's one viable alternative, which is better than all others, it's selected. Otherwise, an error is reported. @@ -2254,7 +2273,7 @@ import path : native make : native-path make-path ; request or requirements. If requirements include a conditional property, and - condiiton of this property is true in context of common + condition of this property is true in context of common properties, then the conditional property should be in common properties as well. @@ -2284,7 +2303,7 @@ exe a : a.cpp Several factors determine the location of a concrete file target. All files in a project are built under - the directory bin unless this is overriden by the build-dir project + the directory bin unless this is overridden by the build-dir project attribute. Under bin is a path that depends on the properties used to build each target. This path is uniquely determined by all non-free, non-incidental properties. For example, @@ -2360,7 +2379,7 @@ exe a : a.cpp subvalue1...subvalueN are legal values of some of F's subfeatures. For example, the properties <toolset>gcc <toolset-version>3.0.1 can be - expressed more conscisely using a value-string, as + expressed more concisely using a value-string, as <toolset>gcc-3.0.1. A property set is a set of properties (i.e. a @@ -2434,7 +2453,7 @@ exe a : a.cpp propagated property, the build systems attempts to use the same property when building any of its dependencies as part of that main - target. For instance, when an optimized exectuable is + target. For instance, when an optimized executable is requested, one usually wants it to be linked with optimized libraries. Thus, the <optimization> feature is propagated. 
@@ -2476,7 +2495,7 @@ exe a : a.cpp Normally a feature only generates a subvariant directory when its value differs from its default value, - leading to an assymmetric subvariant directory structure for + leading to an asymmetric subvariant directory structure for certain values of the feature. A symmetric feature always generates a corresponding subvariant directory. @@ -2670,7 +2689,7 @@ lib/b.cpp -- regular file - It emphasises that projects and targets are different things. + It emphasises that projects and targets are different things. @@ -2686,7 +2705,7 @@ lib/b.cpp -- regular file of targets, and it's reasonable to use them directly from other project. - 2. The rule for unpacking tar is inplemented in terms of + 2. The rule for unpacking tar is implemented in terms of "patch-file", for maintainability, and therefore, must use main target name that contains slashes? diff --git a/doc/src/tutorial.xml b/doc/src/tutorial.xml index e3cb41879..3227a6718 100644 --- a/doc/src/tutorial.xml +++ b/doc/src/tutorial.xml @@ -520,8 +520,8 @@ lib utils : utils.cpp /boost/filesystem//fs ; lib core : core.cpp utils ; exe app : app.cpp core ; This works no matter what kind of linking is used. When core - is built as a shared library, it is linked directly into - utils. Static libraries can't link to other + is built as a shared library, utils is linked + directly into it. Static libraries can't link to other libraries, so when core is built as a static library, its dependency on utils is passed along to core's dependents, causing app diff --git a/example/build-id/Jamroot.jam b/example/build-id/Jamroot.jam new file mode 100644 index 000000000..e69de29bb diff --git a/example/built_tool/core/Jamfile.jam b/example/built_tool/core/Jamfile.jam index 2d96f7182..d4ec62382 100644 --- a/example/built_tool/core/Jamfile.jam +++ b/example/built_tool/core/Jamfile.jam @@ -1,5 +1,6 @@ import toolset ; +import os ; project : requirements ../tblgen//tblgen ; @@ -28,3 +29,11 @@ actions tblgen bind COMMAND { $(COMMAND:E=tblgen) > $(<) } + +if [ os.name ] = VMS +{ + actions tblgen bind COMMAND + { + PIPE MCR $(COMMAND:WE=tblgen) > $(<:W) + } +} diff --git a/example/customization/inline_file.py b/example/customization/inline_file.py index a48c5fc9d..9f13acd87 100644 --- a/example/customization/inline_file.py +++ b/example/customization/inline_file.py @@ -1,8 +1,8 @@ #!/usr/bin/python -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import sys from string import strip @@ -38,7 +38,7 @@ else: file_to_include = sys.argv[2] in_file = open(file_to_include, "r"); - variable_name = strip(in_file.readline()) + variable_name = strip(in_file.readline()) out_file.write("extern const char %s[] = {\n%s};\n\n" % (variable_name, quote_file(in_file))) in_file.close() out_file.close() diff --git a/example/customization/verbatim.jam b/example/customization/verbatim.jam index 931fdce33..700aafb9a 100644 --- a/example/customization/verbatim.jam +++ b/example/customization/verbatim.jam @@ -8,6 +8,8 @@ # which are relevant to your case, remove everything else, and then change names # and actions to taste. +import os ; + # Declare a new target type.
This allows Boost.Build to do something sensible # when targets with the .verbatim extension are found in sources. import type ; @@ -49,3 +51,11 @@ actions inline-file { "./inline_file.py" $(<) $(>) } + +if [ os.name ] = VMS +{ + actions inline-file + { + python inline_file.py $(<:W) $(>:W) + } +} diff --git a/example/customization/verbatim.py b/example/customization/verbatim.py index be285976c..089bd3831 100644 --- a/example/customization/verbatim.py +++ b/example/customization/verbatim.py @@ -1,6 +1,6 @@ -# Copyright 2010 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2010 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # This file is only used with Python port of Boost.Build diff --git a/example/generator/soap.jam b/example/generator/soap.jam index d28bfdecc..b3d9e7633 100644 --- a/example/generator/soap.jam +++ b/example/generator/soap.jam @@ -14,6 +14,7 @@ import generators ; import feature ; import common ; import "class" : new ; +import os ; type.register GCI : gci ; @@ -75,3 +76,11 @@ actions touch { $(TOUCH) $(<) } + +if [ os.name ] = VMS +{ + actions touch + { + $(TOUCH) $(<:W) + } +} diff --git a/example/make/jamroot.jam b/example/make/jamroot.jam index 7bb98e353..3f5ec5b56 100644 --- a/example/make/jamroot.jam +++ b/example/make/jamroot.jam @@ -1,5 +1,6 @@ import feature ; import toolset ; +import os ; path-constant HERE : . ; make main.cpp : main_cpp.pro : @do-something ; @@ -11,3 +12,11 @@ actions do-something { "$(PYTHON:E=python)" "$(HERE)/foo.py" "$(>)" "$(<)" } + +if [ os.name ] = VMS +{ + actions do-something + { + $(PYTHON:E=python) $(HERE:W)foo.py $(>:W) $(<:W) + } +} diff --git a/example/testing/compile-fail.cpp b/example/testing/compile-fail.cpp index cd3e09409..a219fa5c6 100644 --- a/example/testing/compile-fail.cpp +++ b/example/testing/compile-fail.cpp @@ -8,9 +8,10 @@ // #include +#include int main() { std::cout << "Bye!\n"; - return 1 + return EXIT_FAILURE } diff --git a/example/testing/fail.cpp b/example/testing/fail.cpp index f1efa1ee2..965661188 100644 --- a/example/testing/fail.cpp +++ b/example/testing/fail.cpp @@ -8,9 +8,10 @@ // #include +#include int main() { std::cout << "Bye!\n"; - return 1; + return EXIT_FAILURE; } diff --git a/example/testing/success.cpp b/example/testing/success.cpp index e2fa7a4a9..bf5588062 100644 --- a/example/testing/success.cpp +++ b/example/testing/success.cpp @@ -8,9 +8,10 @@ // #include +#include int main() { std::cout << "Hi!\n"; - return 0; + return EXIT_SUCCESS; } diff --git a/src/build-system.jam b/src/build-system.jam index 247326a96..76db2d377 100644 --- a/src/build-system.jam +++ b/src/build-system.jam @@ -555,6 +555,10 @@ local rule should-clean-project ( project ) { default-toolset = msvc ; } + else if [ os.name ] = VMS + { + default-toolset = vmsdecc ; + } else if [ os.name ] = MACOSX { default-toolset = darwin ; @@ -585,26 +589,6 @@ local rule should-clean-project ( project ) local properties = [ $(build-request).get-at 2 ] ; - # Expand properties specified on the command line into multiple property - # sets consisting of all legal property combinations. Each expanded property - # set will be used for a single build run. E.g. if multiple toolsets are - # specified then requested targets will be built with each of them. 
- if $(properties) - { - expanded = [ build-request.expand-no-defaults $(properties) ] ; - local xexpanded ; - for local e in $(expanded) - { - xexpanded += [ property-set.create [ feature.split $(e) ] ] ; - } - expanded = $(xexpanded) ; - } - else - { - expanded = [ property-set.empty ] ; - } - - # Check that we actually found something to build. if ! $(current-project) && ! $(target-ids) { @@ -695,6 +679,29 @@ local rule should-clean-project ( project ) configure.set-log-file $(first-build-build-dir)/config.log ; config-cache.load $(first-build-build-dir)/project-cache.jam ; + # Expand properties specified on the command line into multiple property + # sets consisting of all legal property combinations. Each expanded property + # set will be used for a single build run. E.g. if multiple toolsets are + # specified then requested targets will be built with each of them. + # The expansion is being performed as late as possible so that the feature + # validation is performed after all necessary modules (including project targets + # on the command line) have been loaded. + if $(properties) + { + expanded += [ build-request.convert-command-line-elements $(properties) ] ; + expanded = [ build-request.expand-no-defaults $(expanded) ] ; + local xexpanded ; + for local e in $(expanded) + { + xexpanded += [ property-set.create [ feature.split $(e) ] ] ; + } + expanded = $(xexpanded) ; + } + else + { + expanded = [ property-set.empty ] ; + } + # Now that we have a set of targets to build and a set of property sets to # build the targets with, we can start the main build process by using each # property set to generate virtual targets from all of our listed targets diff --git a/src/build/ac.jam b/src/build/ac.jam index 71bc16c37..c4bd6b7e1 100644 --- a/src/build/ac.jam +++ b/src/build/ac.jam @@ -16,6 +16,7 @@ import virtual-target ; import generators ; import property ; import print ; +import regex ; project.initialize $(__name__) ; .project = [ project.current ] ; @@ -43,9 +44,13 @@ rule find-include-path ( properties : header : provided-path ? ) else { local a = [ class.new action : ac.generate-include : [ property-set.create $(header) ] ] ; - local cpp = [ class.new file-target $(header).cpp exact : CPP : $(.project) : $(a) ] ; + # Create a new CPP target named after the header. + # Replace dots (".") in target basename for portability. + local basename = [ regex.replace $(header:D=) "[.]" "_" ] ; + local header-target = $(header:S=:B=$(basename)) ; + local cpp = [ class.new file-target $(header-target:S=.cpp) exact : CPP : $(.project) : $(a) ] ; cpp = [ virtual-target.register $(cpp) ] ; - local result = [ generators.construct $(.project) $(header) : OBJ : $(properties) : $(cpp) : true ] ; + local result = [ generators.construct $(.project) $(header-target) : OBJ : $(properties) : $(cpp) : true ] ; local jam-targets ; for t in $(result[2-]) { diff --git a/src/build/alias.py b/src/build/alias.py index 575e53609..e9078c746 100755 --- a/src/build/alias.py +++ b/src/build/alias.py @@ -1,13 +1,13 @@ -# Copyright 2003, 2004, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003, 2004, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # Status: ported (danielw) # Base revision: 56043 # This module defines the 'alias' rule and associated class. 
# -# Alias is just a main target which returns its source targets without any +# Alias is just a main target which returns its source targets without any # processing. For example:: # # alias bin : hello test_hello ; @@ -18,7 +18,7 @@ # alias platform-src : win.cpp : NT ; # alias platform-src : linux.cpp : LINUX ; # exe main : main.cpp platform-src ; -# +# # Lastly, it's possible to create local alias for some target, with different # properties:: # @@ -29,7 +29,7 @@ import targets import property_set from b2.manager import get_manager -from b2.util import metatarget +from b2.util import metatarget, is_iterable_typed class AliasTarget(targets.BasicTarget): @@ -37,9 +37,17 @@ class AliasTarget(targets.BasicTarget): targets.BasicTarget.__init__(self, *args) def construct(self, name, source_targets, properties): + if __debug__: + from .virtual_target import VirtualTarget + assert isinstance(name, basestring) + assert is_iterable_typed(source_targets, VirtualTarget) + assert isinstance(properties, property_set.PropertySet) return [property_set.empty(), source_targets] def compute_usage_requirements(self, subvariant): + if __debug__: + from .virtual_target import Subvariant + assert isinstance(subvariant, Subvariant) base = targets.BasicTarget.compute_usage_requirements(self, subvariant) # Add source's usage requirement. If we don't do this, "alias" does not # look like 100% alias. @@ -47,7 +55,11 @@ class AliasTarget(targets.BasicTarget): @metatarget def alias(name, sources=[], requirements=[], default_build=[], usage_requirements=[]): - + assert isinstance(name, basestring) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) + assert is_iterable_typed(usage_requirements, basestring) project = get_manager().projects().current() targets = get_manager().targets() diff --git a/src/build/build-request.jam b/src/build/build-request.jam index 2a1bbb467..3110713b7 100644 --- a/src/build/build-request.jam +++ b/src/build/build-request.jam @@ -150,8 +150,7 @@ rule from-command-line ( command-line * ) if [ MATCH "(.*=.*)" : $(e) ] || [ looks-like-implicit-value $(e:D=) : $(feature-space) ] { - properties += [ convert-command-line-element $(e) : - $(feature-space) ] ; + properties += $(e) ; } else if $(e) { @@ -169,9 +168,22 @@ rule from-command-line ( command-line * ) } -# Converts one element of command line build request specification into internal +# Converts a list of elements of command line build request specification into internal # form. Expects all the project files to already be loaded. # +rule convert-command-line-elements ( elements * ) +{ + local result ; + for local e in $(elements) + { + result += [ convert-command-line-element $(e) ] ; + } + return $(result) ; +} + + +# Converts one element of command line build request specification into internal +# form. 
local rule convert-command-line-element ( e ) { local result ; @@ -286,37 +298,60 @@ rule __test__ ( ) local r ; - r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ; - assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : debug dynamic ; - try ; { - build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ; + r = [ build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ] ; + build-request.convert-command-line-elements [ $(r).get-at 2 ] ; } catch \"static\" is not an implicit feature value ; + r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ; + assert.equal [ $(r).get-at 1 ] : ; + assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ; + + assert.equal + [ build-request.convert-command-line-elements debug runtime-link=dynamic ] + : debug dynamic ; + r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ; assert.equal [ $(r).get-at 1 ] : target ; - assert.equal [ $(r).get-at 2 ] : debug dynamic ; + assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic ; + + assert.equal + [ build-request.convert-command-line-elements debug runtime-link=dynamic ] + : debug dynamic ; r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ; assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : debug dynamic static ; + assert.equal [ $(r).get-at 2 ] : debug runtime-link=dynamic,static ; + + assert.equal + [ build-request.convert-command-line-elements debug runtime-link=dynamic,static ] + : debug dynamic static ; r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ; assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : debug gcc/dynamic - gcc/static ; + assert.equal [ $(r).get-at 2 ] : debug gcc/runtime-link=dynamic,static ; + + assert.equal + [ build-request.convert-command-line-elements debug gcc/runtime-link=dynamic,static ] + : debug gcc/dynamic gcc/static ; r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ; assert.equal [ $(r).get-at 1 ] : ; - assert.equal [ $(r).get-at 2 ] : msvc gcc/static - borland/static ; + assert.equal [ $(r).get-at 2 ] : msvc gcc,borland/runtime-link=static ; + + assert.equal + [ build-request.convert-command-line-elements msvc gcc,borland/runtime-link=static ] + : msvc gcc/static borland/static ; r = [ build-request.from-command-line bjam gcc-3.0 ] ; assert.equal [ $(r).get-at 1 ] : ; assert.equal [ $(r).get-at 2 ] : gcc-3.0 ; + assert.equal + [ build-request.convert-command-line-elements gcc-3.0 ] + : gcc-3.0 ; + feature.finish-test build-request-test-temp ; } diff --git a/src/build/build_request.py b/src/build/build_request.py index 118033e1e..194251688 100644 --- a/src/build/build_request.py +++ b/src/build/build_request.py @@ -11,18 +11,20 @@ import b2.build.feature feature = b2.build.feature from b2.util.utility import * +from b2.util import is_iterable_typed import b2.build.property_set as property_set def expand_no_defaults (property_sets): """ Expand the given build request by combining all property_sets which don't specify conflicting non-free features. 
""" + assert is_iterable_typed(property_sets, property_set.PropertySet) # First make all features and subfeatures explicit expanded_property_sets = [ps.expand_subfeatures() for ps in property_sets] - + # Now combine all of the expanded property_sets product = __x_product (expanded_property_sets) - + return [property_set.create(p) for p in product] @@ -30,6 +32,7 @@ def __x_product (property_sets): """ Return the cross-product of all elements of property_sets, less any that would contain conflicting values for single-valued features. """ + assert is_iterable_typed(property_sets, property_set.PropertySet) x_product_seen = set() return __x_product_aux (property_sets, x_product_seen)[0] @@ -42,8 +45,10 @@ def __x_product_aux (property_sets, seen_features): Returns a tuple of: - list of lists of Property instances, such that within each list, no two Property instance have the same feature, and no Property is for feature in seen_features. - - set of features we saw in property_sets + - set of features we saw in property_sets """ + assert is_iterable_typed(property_sets, property_set.PropertySet) + assert isinstance(seen_features, set) if not property_sets: return ([], set()) @@ -76,7 +81,7 @@ def __x_product_aux (property_sets, seen_features): result.append(properties + inner) else: result.append(properties) - + if inner_seen & these_features: # Some of elements in property_sets[1:] conflict with elements of property_sets[0], # Try again, this time omitting elements of property_sets[0] @@ -85,11 +90,12 @@ def __x_product_aux (property_sets, seen_features): return (result, inner_seen | these_features) - + def looks_like_implicit_value(v): """Returns true if 'v' is either implicit value, or the part before the first '-' symbol is implicit value.""" + assert isinstance(v, basestring) if feature.is_implicit_value(v): return 1 else: @@ -104,7 +110,7 @@ def from_command_line(command_line): and constructs build request from it. Returns a list of two lists. First is the set of targets specified in the command line, and second is the set of requested build properties.""" - + assert is_iterable_typed(command_line, basestring) targets = [] properties = [] @@ -112,17 +118,17 @@ def from_command_line(command_line): if e[:1] != "-": # Build request spec either has "=" in it, or completely # consists of implicit feature values. - if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]): - properties += convert_command_line_element(e) + if e.find("=") != -1 or looks_like_implicit_value(e.split("/")[0]): + properties.append(e) elif e: targets.append(e) return [targets, properties] - + # Converts one element of command line build request specification into # internal form. 
def convert_command_line_element(e): - + assert isinstance(e, basestring) result = None parts = e.split("/") for p in parts: @@ -133,7 +139,7 @@ def convert_command_line_element(e): lresult = [("<%s>%s" % (feature, v)) for v in values] else: lresult = p.split(",") - + if p.find('-') == -1: # FIXME: first port property.validate # property.validate cannot handle subfeatures, @@ -149,68 +155,68 @@ def convert_command_line_element(e): return [property_set.create(b2.build.feature.split(r)) for r in result] -### +### ### rule __test__ ( ) ### { ### import assert feature ; -### +### ### feature.prepare-test build-request-test-temp ; -### +### ### import build-request ; ### import build-request : expand_no_defaults : build-request.expand_no_defaults ; ### import errors : try catch ; ### import feature : feature subfeature ; -### +### ### feature toolset : gcc msvc borland : implicit ; ### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 ### 3.0 3.0.1 3.0.2 : optional ; -### +### ### feature variant : debug release : implicit composite ; ### feature inlining : on off ; ### feature "include" : : free ; -### +### ### feature stdlib : native stlport : implicit ; -### +### ### feature runtime-link : dynamic static : symmetric ; -### -### +### +### ### local r ; -### -### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ; +### +### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ; ### assert.equal [ $(r).get-at 1 ] : ; ### assert.equal [ $(r).get-at 2 ] : debug dynamic ; -### +### ### try ; ### { -### +### ### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ; ### } ### catch \"static\" is not a value of an implicit feature ; -### -### +### +### ### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ; ### assert.equal [ $(r).get-at 1 ] : target ; ### assert.equal [ $(r).get-at 2 ] : debug dynamic ; -### +### ### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ; ### assert.equal [ $(r).get-at 1 ] : ; ### assert.equal [ $(r).get-at 2 ] : debug dynamic static ; -### +### ### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ; ### assert.equal [ $(r).get-at 1 ] : ; -### assert.equal [ $(r).get-at 2 ] : debug gcc/dynamic +### assert.equal [ $(r).get-at 2 ] : debug gcc/dynamic ### gcc/static ; -### +### ### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ; ### assert.equal [ $(r).get-at 1 ] : ; -### assert.equal [ $(r).get-at 2 ] : msvc gcc/static +### assert.equal [ $(r).get-at 2 ] : msvc gcc/static ### borland/static ; -### +### ### r = [ build-request.from-command-line bjam gcc-3.0 ] ; ### assert.equal [ $(r).get-at 1 ] : ; ### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ; -### +### ### feature.finish-test build-request-test-temp ; ### } -### -### +### +### diff --git a/src/build/config-cache.jam b/src/build/config-cache.jam index 5297dbb84..ff69ff6f6 100644 --- a/src/build/config-cache.jam +++ b/src/build/config-cache.jam @@ -7,6 +7,7 @@ import errors ; import regex ; import path ; import project ; +import os ; rule get ( name ) { @@ -49,6 +50,14 @@ actions write @($(STDOUT):E=$(FILE_CONTENTS:J=)) > "$(<)" } +if [ os.name ] = VMS +{ + actions write + { + @($(STDOUT):E=$(FILE_CONTENTS:J=)) | TYPE SYS$INPUT /OUT=$(<:W) + } +} + rule load ( cache-file ) { if $(.cache-file) diff --git a/src/build/configure.py b/src/build/configure.py index 0426832c4..10afb8209 100644 --- a/src/build/configure.py 
+++ b/src/build/configure.py @@ -10,17 +10,17 @@ # This module defines function to help with two main tasks: # # - Discovering build-time configuration for the purposes of adjusting -# build process. +# build process. # - Reporting what is built, and how it is configured. import b2.build.property as property import b2.build.property_set as property_set -import b2.build.targets +from b2.build import targets as targets_ from b2.manager import get_manager from b2.util.sequence import unique -from b2.util import bjam_signature, value_to_jam +from b2.util import bjam_signature, value_to_jam, is_iterable import bjam import os @@ -41,17 +41,22 @@ __log_fd = -1 def register_components(components): """Declare that the components specified by the parameter exist.""" + assert is_iterable(components) __components.extend(components) - + def components_building(components): """Declare that the components specified by the parameters will be build.""" + assert is_iterable(components) __built_components.extend(components) def log_component_configuration(component, message): """Report something about component configuration that the user should better know.""" + assert isinstance(component, basestring) + assert isinstance(message, basestring) __component_logs.setdefault(component, []).append(message) def log_check_result(result): + assert isinstance(result, basestring) global __announced_checks if not __announced_checks: print "Performing configuration checks" @@ -60,7 +65,9 @@ def log_check_result(result): print result def log_library_search_result(library, result): - log_check_result((" - %(library)s : %(result)s" % locals()).rjust(width)) + assert isinstance(library, basestring) + assert isinstance(result, basestring) + log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width)) def print_component_configuration(): @@ -84,6 +91,10 @@ def builds(metatarget_reference, project, ps, what): # Attempt to build a metatarget named by 'metatarget-reference' # in context of 'project' with properties 'ps'. # Returns non-empty value if build is OK. + assert isinstance(metatarget_reference, basestring) + assert isinstance(project, targets_.ProjectTarget) + assert isinstance(ps, property_set.PropertySet) + assert isinstance(what, basestring) result = [] @@ -93,12 +104,12 @@ def builds(metatarget_reference, project, ps, what): result = False __builds_cache[(what, ps)] = False - targets = b2.build.targets.generate_from_reference( + targets = targets_.generate_from_reference( metatarget_reference, project, ps).targets() jam_targets = [] for t in targets: jam_targets.append(t.actualize()) - + x = (" - %s" % what).rjust(__width) if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"): __builds_cache[(what, ps)] = True @@ -112,6 +123,7 @@ def builds(metatarget_reference, project, ps, what): return existing def set_log_file(log_file_name): + assert isinstance(log_file_name, basestring) # Called by Boost.Build startup code to specify name of a file # that will receive results of configure checks. This # should never be called by users. @@ -134,7 +146,7 @@ class CheckTargetBuildsWorker: self.false_properties = property.create_from_strings(false_properties, True) def check(self, ps): - + assert isinstance(ps, property_set.PropertySet) # FIXME: this should not be hardcoded. Other checks might # want to consider different set of features as relevant. 
toolset = ps.get('toolset')[0] @@ -146,7 +158,7 @@ class CheckTargetBuildsWorker: ps.get_properties("architecture") rps = property_set.create(relevant) t = get_manager().targets().current() - p = t.project() + p = t.project() if builds(self.target, p, rps, "%s builds" % self.target): choosen = self.true_properties else: diff --git a/src/build/engine.py b/src/build/engine.py index 35333eaa0..4c2c97eaf 100644 --- a/src/build/engine.py +++ b/src/build/engine.py @@ -10,46 +10,50 @@ import operator import re import b2.build.property_set as property_set -import b2.util -class BjamAction: +from b2.util import set_jam_action, is_iterable + +class BjamAction(object): """Class representing bjam action defined from Python.""" - + def __init__(self, action_name, function): + assert isinstance(action_name, basestring) + assert callable(function) or function is None self.action_name = action_name self.function = function - - def __call__(self, targets, sources, property_set): + def __call__(self, targets, sources, property_set_): + assert is_iterable(targets) + assert is_iterable(sources) + assert isinstance(property_set_, property_set.PropertySet) # Bjam actions defined from Python have only the command # to execute, and no associated jam procedural code. So # passing 'property_set' to it is not necessary. bjam_interface.call("set-update-action", self.action_name, targets, sources, []) if self.function: - self.function(targets, sources, property_set) + self.function(targets, sources, property_set_) -class BjamNativeAction: +class BjamNativeAction(BjamAction): """Class representing bjam action defined by Jam code. We still allow to associate a Python callable that will be called when this action is installed on any target. """ - - def __init__(self, action_name, function): - self.action_name = action_name - self.function = function - - def __call__(self, targets, sources, property_set): + + def __call__(self, targets, sources, property_set_): + assert is_iterable(targets) + assert is_iterable(sources) + assert isinstance(property_set_, property_set.PropertySet) if self.function: - self.function(targets, sources, property_set) - + self.function(targets, sources, property_set_) + p = [] if property_set: - p = property_set.raw() + p = property_set_.raw() + + set_jam_action(self.action_name, targets, sources, p) - b2.util.set_jam_action(self.action_name, targets, sources, p) - action_modifiers = {"updated": 0x01, "together": 0x02, "ignore": 0x04, @@ -77,6 +81,8 @@ class Engine: targets = [targets] if isinstance (sources, str): sources = [sources] + assert is_iterable(targets) + assert is_iterable(sources) for target in targets: for source in sources: @@ -105,6 +111,11 @@ class Engine: echo [ on $(targets) return $(MY-VAR) ] ; "Hello World" """ + if isinstance(targets, str): + targets = [targets] + assert is_iterable(targets) + assert isinstance(variable, basestring) + return bjam_interface.call('get-target-variable', targets, variable) def set_target_variable (self, targets, variable, value, append=0): @@ -114,13 +125,19 @@ class Engine: where to generate targets, and will also be available to updating rule for that 'taret'. 
""" - if isinstance (targets, str): + if isinstance (targets, str): targets = [targets] + if isinstance(value, str): + value = [value] + + assert is_iterable(targets) + assert isinstance(variable, basestring) + assert is_iterable(value) for target in targets: self.do_set_target_variable (target, variable, value, append) - def set_update_action (self, action_name, targets, sources, properties=property_set.empty()): + def set_update_action (self, action_name, targets, sources, properties=None): """ Binds a target to the corresponding update action. If target needs to be updated, the action registered with action_name will be used. @@ -128,9 +145,17 @@ class Engine: either 'register_action' or 'register_bjam_action' method. """ - assert(isinstance(properties, property_set.PropertySet)) - if isinstance (targets, str): + if isinstance(targets, str): targets = [targets] + if isinstance(sources, str): + sources = [sources] + if properties is None: + properties = property_set.empty() + assert isinstance(action_name, basestring) + assert is_iterable(targets) + assert is_iterable(sources) + assert(isinstance(properties, property_set.PropertySet)) + self.do_set_update_action (action_name, targets, sources, properties) def register_action (self, action_name, command, bound_list = [], flags = [], @@ -149,10 +174,11 @@ class Engine: This function will be called by set_update_action, and can set additional target variables. """ - if self.actions.has_key(action_name): - raise "Bjam action %s is already defined" % action_name - - assert(isinstance(flags, list)) + assert isinstance(action_name, basestring) + assert isinstance(command, basestring) + assert is_iterable(bound_list) + assert is_iterable(flags) + assert function is None or callable(function) bjam_flags = reduce(operator.or_, (action_modifiers[flag] for flag in flags), 0) @@ -178,25 +204,37 @@ class Engine: # action name. This way, jamfile rules that take action names # can just register them without specially checking if # action is already registered. 
+ assert isinstance(action_name, basestring) + assert function is None or callable(function) if not self.actions.has_key(action_name): self.actions[action_name] = BjamNativeAction(action_name, function) - + # Overridables - def do_set_update_action (self, action_name, targets, sources, property_set): + def do_set_update_action (self, action_name, targets, sources, property_set_): + assert isinstance(action_name, basestring) + assert is_iterable(targets) + assert is_iterable(sources) + assert isinstance(property_set_, property_set.PropertySet) action = self.actions.get(action_name) if not action: raise Exception("No action %s was registered" % action_name) - action(targets, sources, property_set) + action(targets, sources, property_set_) def do_set_target_variable (self, target, variable, value, append): + assert isinstance(target, basestring) + assert isinstance(variable, basestring) + assert is_iterable(value) + assert isinstance(append, int) # matches bools if append: bjam_interface.call("set-target-variable", target, variable, value, "true") else: bjam_interface.call("set-target-variable", target, variable, value) - + def do_add_dependency (self, target, source): + assert isinstance(target, basestring) + assert isinstance(source, basestring) bjam_interface.call("DEPENDS", target, source) - - + + diff --git a/src/build/errors.py b/src/build/errors.py index d9dceefe0..69d8a37d3 100644 --- a/src/build/errors.py +++ b/src/build/errors.py @@ -1,8 +1,8 @@ # Status: being written afresh by Vladimir Prus -# Copyright 2007 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2007 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # This file is supposed to implement error reporting for Boost.Build. # Experience with jam version has shown that printing full backtrace @@ -22,7 +22,7 @@ import sys def format(message, prefix=""): parts = str(message).split("\n") return "\n".join(prefix+p for p in parts) - + class Context: @@ -71,7 +71,7 @@ class ExceptionWithUserContext(Exception): traceback.print_tb(self.original_tb_) elif self.stack_: for l in traceback.format_list(self.stack_): - print l, + print l, else: print " use the '--stacktrace' option to get Python stacktrace" print @@ -87,9 +87,9 @@ def user_error_checkpoint(callable): errors.handle_stray_exception(e) finally: errors.pop_user_context() - + return wrapper - + class Errors: def __init__(self): @@ -116,12 +116,12 @@ class Errors: def handle_stray_exception(self, e): raise ExceptionWithUserContext("unexpected exception", self.contexts_[:], - e, sys.exc_info()[2]) + e, sys.exc_info()[2]) def __call__(self, message): self._count = self._count + 1 - raise ExceptionWithUserContext(message, self.contexts_[:], + raise ExceptionWithUserContext(message, self.contexts_[:], stack=traceback.extract_stack()) - - + + diff --git a/src/build/feature.jam b/src/build/feature.jam index ee6abc591..e58edcbed 100644 --- a/src/build/feature.jam +++ b/src/build/feature.jam @@ -30,6 +30,7 @@ local rule setup ( ) link-incompatible subfeature order-sensitive + hidden ; .all-features = ; diff --git a/src/build/feature.py b/src/build/feature.py index 827dae340..6cf81a1a1 100644 --- a/src/build/feature.py +++ b/src/build/feature.py @@ -1,15 +1,15 @@ # Status: ported, except for unit tests. 
# Base revision: 64488 # -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2002, 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2001, 2002, 2003 Dave Abrahams +# Copyright 2002, 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import re -from b2.util import utility, bjam_signature +from b2.util import utility, bjam_signature, is_iterable_typed import b2.util.set from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq from b2.exceptions import * @@ -25,6 +25,9 @@ class Feature(object): _attribute_name_to_integer = {} def __init__(self, name, values, attributes): + assert isinstance(name, basestring) + assert is_iterable_typed(values, basestring) + assert is_iterable_typed(attributes, basestring) self._name = name self._values = values self._default = None @@ -42,12 +45,19 @@ class Feature(object): return self._values def add_values(self, values): + assert is_iterable_typed(values, basestring) self._values.extend(values) def attributes(self): return self._attributes def set_default(self, value): + assert isinstance(value, basestring) + for attr in ('free', 'optional'): + if getattr(self, attr)(): + get_manager().errors()('"{}" feature "<{}>" cannot have a default value.' + .format(attr, self._name)) + self._default = value def default(self): @@ -61,6 +71,7 @@ class Feature(object): return self._subfeatures def add_subfeature(self, name): + assert isinstance(name, Feature) self._subfeatures.append(name) def parent(self): @@ -72,19 +83,21 @@ class Feature(object): return self._parent def set_parent(self, feature, value): + assert isinstance(feature, Feature) + assert isinstance(value, basestring) self._parent = (feature, value) def __str__(self): return self._name - + def reset (): """ Clear the module state. This is mainly for testing purposes. """ global __all_attributes, __all_features, __implicit_features, __composite_properties global __features_with_attributes, __subfeature_from_value, __all_top_features, __free_features global __all_subfeatures - + # The list with all attribute names. __all_attributes = [ 'implicit', 'composite', @@ -107,28 +120,28 @@ def reset (): return getattr(self, "_attributes") & flag setattr(Feature, a.replace("-", "_"), probe) i = i << 1 - + # A map containing all features. The key is the feature name. # The value is an instance of Feature class. __all_features = {} - + # All non-subfeatures. __all_top_features = [] - + # Maps valus to the corresponding implicit feature __implicit_features = {} - + # A map containing all composite properties. The key is a Property instance, # and the value is a list of Property instances __composite_properties = {} - + __features_with_attributes = {} for attribute in __all_attributes: __features_with_attributes [attribute] = [] - + # Maps a value to the corresponding subfeature name. __subfeature_from_value = {} - + # All free features __free_features = [] @@ -146,6 +159,7 @@ def get(name): Throws if no feature by such name exists """ + assert isinstance(name, basestring) return __all_features[name] # FIXME: prepare-test/finish-test? 
@@ -163,12 +177,12 @@ def feature (name, values, attributes = []): __all_features[name] = feature # Temporary measure while we have not fully moved from 'gristed strings' __all_features["<" + name + ">"] = feature - + for attribute in attributes: __features_with_attributes [attribute].append (name) name = add_grist(name) - + if 'subfeature' in attributes: __all_subfeatures.append(name) else: @@ -208,9 +222,10 @@ def set_default (feature, value): def defaults(features): """ Returns the default property values for the given features. """ + assert is_iterable_typed(features, Feature) # FIXME: should merge feature and property modules. - import property - + from . import property + result = [] for f in features: if not f.free() and not f.optional() and f.default(): @@ -221,21 +236,22 @@ def defaults(features): def valid (names): """ Returns true iff all elements of names are valid features. """ - def valid_one (name): return __all_features.has_key (name) - - if isinstance (names, str): - return valid_one (names) - else: - return all([ valid_one (name) for name in names ]) + if isinstance(names, str): + names = [names] + assert is_iterable_typed(names, basestring) + + return all(name in __all_features for name in names) def attributes (feature): """ Returns the attributes of the given feature. """ + assert isinstance(feature, basestring) return __all_features[feature].attributes_string_list() - + def values (feature): """ Return the values of the given feature. """ + assert isinstance(feature, basestring) validate_feature (feature) return __all_features[feature].values() @@ -243,43 +259,43 @@ def is_implicit_value (value_string): """ Returns true iff 'value_string' is a value_string of an implicit feature. """ - + assert isinstance(value_string, basestring) if __implicit_features.has_key(value_string): return __implicit_features[value_string] - + v = value_string.split('-') if not __implicit_features.has_key(v[0]): return False feature = __implicit_features[v[0]] - + for subvalue in (v[1:]): if not __find_implied_subfeature(feature, subvalue, v[0]): return False - + return True def implied_feature (implicit_value): """ Returns the implicit feature associated with the given implicit value. """ + assert isinstance(implicit_value, basestring) components = implicit_value.split('-') - + if not __implicit_features.has_key(components[0]): raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value) - + return __implicit_features[components[0]] def __find_implied_subfeature (feature, subvalue, value_string): - - #if value_string == None: value_string = '' + assert isinstance(feature, Feature) + assert isinstance(subvalue, basestring) + assert isinstance(value_string, basestring) - if not __subfeature_from_value.has_key(feature) \ - or not __subfeature_from_value[feature].has_key(value_string) \ - or not __subfeature_from_value[feature][value_string].has_key (subvalue): + try: + return __subfeature_from_value[feature][value_string][subvalue] + except KeyError: return None - - return __subfeature_from_value[feature][value_string][subvalue] # Given a feature and a value of one of its subfeatures, find the name # of the subfeature. 
If value-string is supplied, looks for implied @@ -289,6 +305,9 @@ def __find_implied_subfeature (feature, subvalue, value_string): # value-string # The value of the main feature def implied_subfeature (feature, subvalue, value_string): + assert isinstance(feature, Feature) + assert isinstance(subvalue, basestring) + assert isinstance(value_string, basestring) result = __find_implied_subfeature (feature, subvalue, value_string) if not result: raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string)) @@ -298,28 +317,19 @@ def implied_subfeature (feature, subvalue, value_string): def validate_feature (name): """ Checks if all name is a valid feature. Otherwise, raises an exception. """ + assert isinstance(name, basestring) if not __all_features.has_key(name): raise InvalidFeature ("'%s' is not a valid feature name" % name) else: return __all_features[name] -def valid (names): - """ Returns true iff all elements of names are valid features. - """ - def valid_one (name): return __all_features.has_key (name) - - if isinstance (names, str): - return valid_one (names) - else: - return [ valid_one (name) for name in names ] - # Uses Property -def __expand_subfeatures_aux (property, dont_validate = False): +def __expand_subfeatures_aux (property_, dont_validate = False): """ Helper for expand_subfeatures. Given a feature and value, or just a value corresponding to an implicit feature, returns a property set consisting of all component subfeatures and their values. For example: - + expand_subfeatures gcc-2.95.2-linux-x86 -> gcc 2.95.2 linux x86 equivalent to: @@ -329,33 +339,35 @@ def __expand_subfeatures_aux (property, dont_validate = False): value: The value of the feature. dont_validate: If True, no validation of value string will be done. """ - f = property.feature() - v = property.value() + from . import property # no __debug__ since Property is used elsewhere + assert isinstance(property_, property.Property) + assert isinstance(dont_validate, int) # matches bools + + f = property_.feature() + v = property_.value() if not dont_validate: validate_value_string(f, v) components = v.split ("-") - + v = components[0] - import property + result = [property.Property(f, components[0])] - result = [property.Property(f, components[0])] - subvalues = components[1:] while len(subvalues) > 0: subvalue = subvalues [0] # pop the head off of subvalues subvalues = subvalues [1:] - + subfeature = __find_implied_subfeature (f, subvalue, v) - + # If no subfeature was found, reconstitute the value string and use that if not subfeature: return [property.Property(f, '-'.join(components))] - + result.append(property.Property(subfeature, subvalue)) - + return result def expand_subfeatures(properties, dont_validate = False): @@ -363,11 +375,11 @@ def expand_subfeatures(properties, dont_validate = False): Make all elements of properties corresponding to implicit features explicit, and express all subfeature values as separate properties in their own right. For example, the property - + gcc-2.95.2-linux-x86 - + might expand to - + gcc 2.95.2 linux x86 properties: A sequence with elements of the form @@ -375,6 +387,10 @@ def expand_subfeatures(properties, dont_validate = False): case of implicit features. : dont_validate: If True, no validation of value string will be done. 
""" + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + assert isinstance(dont_validate, int) # matches bools result = [] for p in properties: # Don't expand subfeatures in subfeatures @@ -408,6 +424,8 @@ def expand_subfeatures(properties, dont_validate = False): def extend (name, values): """ Adds the given values to the given feature. """ + assert isinstance(name, basestring) + assert is_iterable_typed(values, basestring) name = add_grist (name) __validate_feature (name) feature = __all_features [name] @@ -419,7 +437,7 @@ def extend (name, values): __implicit_features[v] = feature - if len (feature.values()) == 0 and len (values) > 0: + if values and not feature.values() and not(feature.free() or feature.optional()): # This is the first value specified for this feature, # take it as default value feature.set_default(values[0]) @@ -429,6 +447,8 @@ def extend (name, values): def validate_value_string (f, value_string): """ Checks that value-string is a valid value-string for the given feature. """ + assert isinstance(f, Feature) + assert isinstance(value_string, basestring) if f.free() or value_string in f.values(): return @@ -453,39 +473,42 @@ def validate_value_string (f, value_string): value-string is provided, the subvalues are only valid for the given value of the feature. Thus, you could say that mingw is specifc to gcc-2.95.2 as follows: - + extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ; feature: The feature whose subfeature is being extended. - + value-string: If supplied, specifies a specific value of the main feature for which the new subfeature values are valid. - + subfeature: The name of the subfeature. - + subvalues: The additional values of the subfeature being defined. """ def extend_subfeature (feature_name, value_string, subfeature_name, subvalues): - + assert isinstance(feature_name, basestring) + assert isinstance(value_string, basestring) + assert isinstance(subfeature_name, basestring) + assert is_iterable_typed(subvalues, basestring) feature = validate_feature(feature_name) - + if value_string: validate_value_string(feature, value_string) subfeature_name = feature_name + '-' + __get_subfeature_name (subfeature_name, value_string) - + extend(subfeature_name, subvalues) ; subfeature = __all_features[subfeature_name] if value_string == None: value_string = '' - + if not __subfeature_from_value.has_key(feature): __subfeature_from_value [feature] = {} - + if not __subfeature_from_value[feature].has_key(value_string): __subfeature_from_value [feature][value_string] = {} - + for subvalue in subvalues: __subfeature_from_value [feature][value_string][subvalue] = subfeature @@ -496,16 +519,16 @@ def subfeature (feature_name, value_string, subfeature, subvalues, attributes = feature_name: Root feature that is not a subfeature. value_string: An optional value-string specifying which feature or subfeature values this subfeature is specific to, - if any. + if any. subfeature: The name of the subfeature being declared. subvalues: The allowed values of this subfeature. attributes: The attributes of the subfeature. 
""" parent_feature = validate_feature (feature_name) - + # Add grist to the subfeature name if a value-string was supplied subfeature_name = __get_subfeature_name (subfeature, value_string) - + if subfeature_name in __all_features[feature_name].subfeatures(): message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name) message += " specific to '%s'" % value_string @@ -514,7 +537,7 @@ def subfeature (feature_name, value_string, subfeature, subvalues, attributes = # First declare the subfeature as a feature in its own right f = feature (feature_name + '-' + subfeature_name, subvalues, attributes + ['subfeature']) f.set_parent(parent_feature, value_string) - + parent_feature.add_subfeature(f) # Now make sure the subfeature values are known. @@ -527,7 +550,7 @@ def compose (composite_property_s, component_properties_s): All parameters are value strings """ - import property + from . import property component_properties_s = to_seq (component_properties_s) composite_property = property.create_from_string(composite_property_s) @@ -537,7 +560,7 @@ def compose (composite_property_s, component_properties_s): component_properties = component_properties_s else: component_properties = [property.create_from_string(p) for p in component_properties_s] - + if not f.composite(): raise BaseException ("'%s' is not a composite feature" % f) @@ -550,10 +573,13 @@ def compose (composite_property_s, component_properties_s): __composite_properties[composite_property] = component_properties -def expand_composite(property): - result = [ property ] - if __composite_properties.has_key(property): - for p in __composite_properties[property]: +def expand_composite(property_): + if __debug__: + from .property import Property + assert isinstance(property_, Property) + result = [ property_ ] + if __composite_properties.has_key(property_): + for p in __composite_properties[property_]: result.extend(expand_composite(p)) return result @@ -567,7 +593,7 @@ def get_values (feature, properties): for p in properties: if get_grist (p) == feature: result.append (replace_grist (p, '')) - + return result def free_features (): @@ -579,6 +605,9 @@ def expand_composites (properties): """ Expand all composite properties in the set so that all components are explicitly expressed. """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) explicit_features = set(p.feature() for p in properties) result = [] @@ -604,7 +633,7 @@ def expand_composites (properties): result.append (x) elif any(r.feature() == f for r in result): raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n" - "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f, + "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f, [r.value() for r in result if r.feature() == f], p, x.value())) else: result.append (x) @@ -617,6 +646,11 @@ def is_subfeature_of (parent_property, f): feature, or if f is a subfeature of the parent_property's feature specific to the parent_property's value. """ + if __debug__: + from .property import Property + assert isinstance(parent_property, Property) + assert isinstance(f, Feature) + if not f.subfeature(): return False @@ -638,38 +672,27 @@ def is_subfeature_of (parent_property, f): def __is_subproperty_of (parent_property, p): """ As is_subfeature_of, for subproperties. 
""" + if __debug__: + from .property import Property + assert isinstance(parent_property, Property) + assert isinstance(p, Property) return is_subfeature_of (parent_property, p.feature()) - + # Returns true iff the subvalue is valid for the feature. When the # optional value-string is provided, returns true iff the subvalues # are valid for the given value of the feature. def is_subvalue(feature, value_string, subfeature, subvalue): - + assert isinstance(feature, basestring) + assert isinstance(value_string, basestring) + assert isinstance(subfeature, basestring) + assert isinstance(subvalue, basestring) if not value_string: value_string = '' - - if not __subfeature_from_value.has_key(feature): + try: + return __subfeature_from_value[feature][value_string][subvalue] == subfeature + except KeyError: return False - - if not __subfeature_from_value[feature].has_key(value_string): - return False - - if not __subfeature_from_value[feature][value_string].has_key(subvalue): - return False - - if __subfeature_from_value[feature][value_string][subvalue]\ - != subfeature: - return False - - return True - -def implied_subfeature (feature, subvalue, value_string): - result = __find_implied_subfeature (feature, subvalue, value_string) - if not result: - raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string)) - - return result # Uses Property @@ -684,43 +707,50 @@ def expand (properties): two values of a given non-free feature are directly expressed in the input, an error is issued. """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) expanded = expand_subfeatures(properties) return expand_composites (expanded) - + # Accepts list of Property objects def add_defaults (properties): """ Given a set of properties, add default values for features not - represented in the set. + represented in the set. Note: if there's there's ordinary feature F1 and composite feature F2, which includes some value for F1, and both feature have default values, then the default value of F1 will be added, not the value in F2. This might not be right idea: consider - + feature variant : debug ... ; debug : .... on feature : off on ; - + Here, when adding default for an empty property set, we'll get - + debug off - - and that's kind of strange. + + and that's kind of strange. """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + result = [x for x in properties] - + handled_features = set() for p in properties: # We don't add default for conditional properties. We don't want # debug:DEBUG to be takes as specified value for if not p.condition(): handled_features.add(p.feature()) - + missing_top = [f for f in __all_top_features if not f in handled_features] more = defaults(missing_top) result.extend(more) for p in more: handled_features.add(p.feature()) - + # Add defaults for subfeatures of features which are present for p in result[:]: s = p.feature().subfeatures() @@ -728,7 +758,7 @@ def add_defaults (properties): for p in more: handled_features.add(p.feature()) result.extend(more) - + return result def minimize (properties): @@ -739,29 +769,31 @@ def minimize (properties): Implicit properties will be expressed without feature grist, and sub-property values will be expressed as elements joined to the corresponding main property. 
- """ - + """ + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) # remove properties implied by composite features components = [] for property in properties: if __composite_properties.has_key (property): components.extend(__composite_properties[property]) properties = b2.util.set.difference (properties, components) - + # handle subfeatures and implicit features # move subfeatures to the end of the list properties = [p for p in properties if not p.feature().subfeature()] +\ [p for p in properties if p.feature().subfeature()] - + result = [] while properties: p = properties[0] f = p.feature() - + # locate all subproperties of $(x[1]) in the property set subproperties = __select_subproperties (p, properties) - + if subproperties: # reconstitute the joined property name subproperties.sort () @@ -774,7 +806,7 @@ def minimize (properties): # eliminate properties whose value is equal to feature's # default and which are not symmetric and which do not # contradict values implied by composite properties. - + # since all component properties of composites in the set # have been eliminated, any remaining property whose # feature is the same as a component of a composite in the @@ -784,7 +816,7 @@ def minimize (properties): #\ #or get_grist (fullp) in get_grist (components): # FIXME: restore above - + properties = properties[1:] @@ -802,17 +834,17 @@ def split (properties): substitution of backslashes for slashes, since Jam, unbidden, sometimes swaps slash direction on NT. """ - + assert isinstance(properties, basestring) def split_one (properties): pieces = re.split (__re_slash_or_backslash, properties) result = [] - + for x in pieces: if not get_grist (x) and len (result) > 0 and get_grist (result [-1]): result = result [0:-1] + [ result [-1] + '/' + x ] else: result.append (x) - + return result if isinstance (properties, str): @@ -822,32 +854,34 @@ def split (properties): for p in properties: result += split_one (p) return result - + def compress_subproperties (properties): """ Combine all subproperties into their parent properties Requires: for every subproperty, there is a parent property. All features are explicitly expressed. 
- + This rule probably shouldn't be needed, but build-request.expand-no-defaults is being abused for unintended purposes and it needs help """ + from .property import Property + assert is_iterable_typed(properties, Property) result = [] matched_subs = set() all_subs = set() for p in properties: f = p.feature() - + if not f.subfeature(): subs = __select_subproperties (p, properties) if subs: - + matched_subs.update(subs) subvalues = '-'.join (sub.value() for sub in subs) - result.append(b2.build.property.Property( + result.append(Property( p.feature(), p.value() + '-' + subvalues, p.condition())) else: @@ -865,10 +899,16 @@ def compress_subproperties (properties): # Private methods def __select_subproperties (parent_property, properties): + if __debug__: + from .property import Property + assert is_iterable_typed(properties, Property) + assert isinstance(parent_property, Property) return [ x for x in properties if __is_subproperty_of (parent_property, x) ] def __get_subfeature_name (subfeature, value_string): - if value_string == None: + assert isinstance(subfeature, basestring) + assert isinstance(value_string, basestring) or value_string is None + if value_string == None: prefix = '' else: prefix = value_string + ':' @@ -877,10 +917,12 @@ def __get_subfeature_name (subfeature, value_string): def __validate_feature_attributes (name, attributes): + assert isinstance(name, basestring) + assert is_iterable_typed(attributes, basestring) for attribute in attributes: if not attribute in __all_attributes: raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (b2.util.set.difference (attributes, __all_attributes)), name)) - + if name in __all_features: raise AlreadyDefined ("feature '%s' already defined" % name) elif 'implicit' in attributes and 'free' in attributes: @@ -888,10 +930,11 @@ def __validate_feature_attributes (name, attributes): elif 'free' in attributes and 'propagated' in attributes: raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name) - + def __validate_feature (feature): """ Generates an error if the feature is unknown. """ + assert isinstance(feature, basestring) if not __all_features.has_key (feature): raise BaseException ('unknown feature "%s"' % feature) @@ -902,6 +945,10 @@ def __select_subfeatures (parent_property, features): subfeatures of the property's feature which are conditional on the property's value. """ + if __debug__: + from .property import Property + assert isinstance(parent_property, Property) + assert is_iterable_typed(features, Feature) return [f for f in features if is_subfeature_of (parent_property, f)] - + # FIXME: copy over tests. diff --git a/src/build/generators.py b/src/build/generators.py index dd195a840..f1c514556 100644 --- a/src/build/generators.py +++ b/src/build/generators.py @@ -18,13 +18,13 @@ # It starts by selecting 'viable generators', which have any chances of producing # the desired target type with the required properties. Generators are ranked and # a set of most specific ones is selected. -# +# # The most specific generators have their 'run' methods called, with the properties # and list of sources. Each one selects target which can be directly consumed, and # tries to convert the remaining ones to the types it can consume. This is done # by recursively calling 'construct' with all consumable types. 
# -# If the generator has collected all the targets it needs, it creates targets +# If the generator has collected all the targets it needs, it creates targets # corresponding to result, and returns it. When all generators have been run, # results of one of them are selected and returned as result. # @@ -35,7 +35,7 @@ # Likewise, when generator tries to convert sources to consumable types, it can get # more targets that it was asked for. The question is what to do with extra targets. # Boost.Build attempts to convert them to requested types, and attempts as early as -# possible. Specifically, this is done after invoking each generator. (Later I'll +# possible. Specifically, this is done after invoking each generator. (Later I'll # document the rationale for trying extra target conversion at that point). # # That early conversion is not always desirable. Suppose a generator got a source of @@ -52,10 +52,10 @@ import cStringIO import os.path from virtual_target import Subvariant -import virtual_target, type, property_set, property +from . import virtual_target, type, property_set, property from b2.util.logger import * from b2.util.utility import * -from b2.util import set +from b2.util import set as set_, is_iterable_typed, is_iterable from b2.util.sequence import unique import b2.util.sequence as sequence from b2.manager import get_manager @@ -73,8 +73,8 @@ def reset (): __type_to_generators = {} __generators_for_toolset = {} __overrides = {} - - # TODO: can these be global? + + # TODO: can these be global? __construct_stack = [] __viable_generators_cache = {} __viable_source_types_cache = {} @@ -95,7 +95,7 @@ __indent = "" def debug(): global __debug if __debug is None: - __debug = "--debug-generators" in bjam.variable("ARGV") + __debug = "--debug-generators" in bjam.variable("ARGV") return __debug def increase_indent(): @@ -114,7 +114,7 @@ def decrease_indent(): # same generator. Does nothing if a non-derived target type is passed to it. # def update_cached_information_with_a_new_type(type): - + assert isinstance(type, basestring) base_type = b2.build.type.base(type) if base_type: @@ -153,7 +153,7 @@ def invalidate_extendable_viable_source_target_type_cache(): __vst_cached_types.append(t) else: del __viable_source_types_cache[t] - + def dout(message): if debug(): print __indent + message @@ -162,7 +162,7 @@ class Generator: """ Creates a generator. manager: the build manager. id: identifies the generator - + rule: the rule which sets up build actions. composing: whether generator processes each source target in @@ -171,49 +171,52 @@ class Generator: recusrive generators.construct_types call. source_types (optional): types that this generator can handle - + target_types_and_names: types the generator will create and, optionally, names for created targets. Each element should have the form type["(" name-pattern ")"] for example, obj(%_x). Name of generated target will be found by replacing % with the name of source, provided explicit name was not specified. - + requirements (optional) - + NOTE: all subclasses must have a similar signature for clone to work! 
""" def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []): - assert(not isinstance(source_types, str)) - assert(not isinstance(target_types_and_names, str)) + assert isinstance(id, basestring) + assert isinstance(composing, bool) + assert is_iterable_typed(source_types, basestring) + assert is_iterable_typed(target_types_and_names, basestring) + assert is_iterable_typed(requirements, basestring) self.id_ = id self.composing_ = composing self.source_types_ = source_types self.target_types_and_names_ = target_types_and_names self.requirements_ = requirements - + self.target_types_ = [] self.name_prefix_ = [] self.name_postfix_ = [] - + for e in target_types_and_names: # Create three parallel lists: one with the list of target types, - # and two other with prefixes and postfixes to be added to target + # and two other with prefixes and postfixes to be added to target # name. We use parallel lists for prefix and postfix (as opposed # to mapping), because given target type might occur several times, # for example "H H(%_symbols)". m = _re_separate_types_prefix_and_postfix.match (e) - + if not m: raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id)) - + target_type = m.group (1) if not target_type: target_type = '' prefix = m.group (3) if not prefix: prefix = '' postfix = m.group (4) if not postfix: postfix = '' - + self.target_types_.append (target_type) self.name_prefix_.append (prefix) self.name_postfix_.append (postfix) @@ -229,9 +232,11 @@ class Generator: - id - value to feature in properties """ - return self.__class__ (new_id, - self.composing_, - self.source_types_, + assert isinstance(new_id, basestring) + assert is_iterable_typed(new_toolset_properties, basestring) + return self.__class__ (new_id, + self.composing_, + self.source_types_, self.target_types_and_names_, # Note: this does not remove any subfeatures of # which might cause problems @@ -241,11 +246,13 @@ class Generator: """Creates another generator that is the same as $(self), except that if 'base' is in target types of $(self), 'type' will in target types of the new generator.""" + assert isinstance(base, basestring) + assert isinstance(type, basestring) target_types = [] for t in self.target_types_and_names_: m = _re_match_type.match(t) assert m - + if m.group(1) == base: if m.group(2): target_types.append(type + m.group(2)) @@ -258,7 +265,7 @@ class Generator: self.source_types_, target_types, self.requirements_) - + def id(self): return self.id_ @@ -271,28 +278,29 @@ class Generator: def target_types (self): """ Returns the list of target types that this generator produces. It is assumed to be always the same -- i.e. it cannot change depending - list of sources. + list of sources. """ return self.target_types_ def requirements (self): """ Returns the required properties for this generator. Properties - in returned set must be present in build properties if this + in returned set must be present in build properties if this generator is to be used. If result has grist-only element, that build properties must include some value of that feature. """ return self.requirements_ def match_rank (self, ps): - """ Returns true if the generator can be run with the specified + """ Returns true if the generator can be run with the specified properties. """ # See if generator's requirements are satisfied by # 'properties'. Treat a feature name in requirements # (i.e. grist-only element), as matching any value of the # feature. 
+ assert isinstance(ps, property_set.PropertySet) all_requirements = self.requirements () - + property_requirements = [] feature_requirements = [] # This uses strings because genenator requirements allow @@ -304,31 +312,38 @@ class Generator: else: feature_requirements.append (r) - + return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \ and all(ps.get(get_grist(s)) for s in feature_requirements) - + def run (self, project, name, prop_set, sources): """ Tries to invoke this generator on the given sources. Returns a list of generated targets (instances of 'virtual-target'). project: Project for which the targets are generated. - - name: Determines the name of 'name' attribute for + + name: Determines the name of 'name' attribute for all generated targets. See 'generated_targets' method. - + prop_set: Desired properties for generated targets. - + sources: Source targets. """ - + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + # intermediary targets don't have names, so None is possible + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + if project.manager ().logger ().on (): project.manager ().logger ().log (__name__, " generator '%s'" % self.id_) project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_) - + if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1: raise BaseException ("Unsupported source/source_type combination") - + # We don't run composing generators if no name is specified. The reason # is that composing generator combines several targets, which can have # different names, and it cannot decide which name to give for produced @@ -337,7 +352,7 @@ class Generator: # This in effect, means that composing generators are runnable only # at top-level of transofrmation graph, or if name is passed explicitly. # Thus, we dissallow composing generators in the middle. For example, the - # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed + # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed # (the OBJ -> STATIC_LIB generator is composing) if not self.composing_ or name: return self.run_really (project, name, prop_set, sources) @@ -345,15 +360,21 @@ class Generator: return [] def run_really (self, project, name, prop_set, sources): - + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + # intermediary targets don't have names, so None is possible + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) # consumed: Targets that this generator will consume directly. # bypassed: Targets that can't be consumed and will be returned as-is. - + if self.composing_: (consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources) else: (consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources) - + result = [] if consumed: result = self.construct_result (consumed, project, name, prop_set) @@ -369,17 +390,23 @@ class Generator: return result def construct_result (self, consumed, project, name, prop_set): - """ Constructs the dependency graph that will be returned by this + """ Constructs the dependency graph that will be returned by this generator. 
consumed: Already prepared list of consumable targets - If generator requires several source files will contain + If generator requires several source files will contain exactly len $(self.source_types_) targets with matching types - Otherwise, might contain several targets with the type of + Otherwise, might contain several targets with the type of self.source_types_ [0] project: name: prop_set: Properties to be used for all actions create here """ + if __debug__: + from .targets import ProjectTarget + assert is_iterable_typed(consumed, virtual_target.VirtualTarget) + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) result = [] # If this is 1->1 transformation, apply it to all consumed targets in order. if len (self.source_types_) < 2 and not self.composing_: @@ -395,6 +422,7 @@ class Generator: return result def determine_target_name(self, fullname): + assert isinstance(fullname, basestring) # Determine target name from fullname (maybe including path components) # Place optional prefix and postfix around basename @@ -415,7 +443,8 @@ class Generator: def determine_output_name(self, sources): """Determine the name of the produced target from the names of the sources.""" - + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + # The simple case if when a name # of source has single dot. Then, we take the part before # dot. Several dots can be caused by: @@ -426,47 +455,53 @@ class Generator: # dot. In the second case -- no sure, but for now take # the part till the last dot too. name = os.path.splitext(sources[0].name())[0] - + for s in sources[1:]: n2 = os.path.splitext(s.name()) if n2 != name: get_manager().errors()( "%s: source targets have different names: cannot determine target name" % (self.id_)) - + # Names of sources might include directory. We should strip it. return self.determine_target_name(sources[0].name()) - - + + def generated_targets (self, sources, prop_set, project, name): """ Constructs targets that are created after consuming 'sources'. The result will be the list of virtual-target, which the same length as 'target_types' attribute and with corresponding types. - - When 'name' is empty, all source targets must have the same value of + + When 'name' is empty, all source targets must have the same value of the 'name' attribute, which will be used instead of the 'name' argument. - + The value of 'name' attribute for each generated target will be equal to the 'name' parameter if there's no name pattern for this type. Otherwise, - the '%' symbol in the name pattern will be replaced with the 'name' parameter + the '%' symbol in the name pattern will be replaced with the 'name' parameter to obtain the 'name' attribute. - + For example, if targets types are T1 and T2(with name pattern "%_x"), suffixes for T1 and T2 are .t1 and t2, and source if foo.z, then created files would be "foo.t1" and "foo_x.t2". The 'name' attribute actually determined the basename of a file. - + Note that this pattern mechanism has nothing to do with implicit patterns - in make. It's a way to produce target which name is different for name of + in make. It's a way to produce target which name is different for name of source. 
""" + if __debug__: + from .targets import ProjectTarget + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None if not name: name = self.determine_output_name(sources) - + # Assign an action for each target action = self.action_class() a = action(project.manager(), sources, self.id_, prop_set) - + # Create generated target for each target type. targets = [] pre = self.name_prefix_ @@ -477,9 +512,9 @@ class Generator: generated_name = os.path.join(os.path.dirname(name), generated_name) pre = pre[1:] post = post[1:] - + targets.append(virtual_target.FileTarget(generated_name, t, project, a)) - + return [ project.manager().virtual_targets().register(t) for t in targets ] def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False): @@ -489,17 +524,24 @@ class Generator: only_one: convert 'source' to only one of source types if there's more that one possibility, report an error. - + Returns a pair: - consumed: all targets that can be consumed. + consumed: all targets that can be consumed. bypassed: all targets that cannot be consumed. """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(name, basestring) or name is None + assert isinstance(project, ProjectTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(only_one, bool) consumed = [] bypassed = [] - missing_types = [] + missing_types = [] if len (sources) > 1: - # Don't know how to handle several sources yet. Just try + # Don't know how to handle several sources yet. Just try # to pass the request to other generator missing_types = self.source_types_ @@ -507,26 +549,26 @@ class Generator: (c, m) = self.consume_directly (sources [0]) consumed += c missing_types += m - + # No need to search for transformation if # some source type has consumed source and # no more source types are needed. if only_one and consumed: missing_types = [] - + #TODO: we should check that only one source type #if create of 'only_one' is true. # TODO: consider if consuned/bypassed separation should # be done by 'construct_types'. - + if missing_types: transformed = construct_types (project, name, missing_types, prop_set, sources) - + # Add targets of right type to 'consumed'. Add others to # 'bypassed'. The 'generators.construct' rule has done # its best to convert everything to the required type. # There's no need to rerun it on targets of different types. - + # NOTE: ignoring usage requirements for t in transformed[1]: if t.type() in missing_types: @@ -534,36 +576,45 @@ class Generator: else: bypassed.append(t) - + consumed = unique(consumed) bypassed = unique(bypassed) - + # remove elements of 'bypassed' that are in 'consumed' - - # Suppose the target type of current generator, X is produced from + + # Suppose the target type of current generator, X is produced from # X_1 and X_2, which are produced from Y by one generator. # When creating X_1 from Y, X_2 will be added to 'bypassed' # Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed' # But they are also in 'consumed'. We have to remove them from # bypassed, so that generators up the call stack don't try to convert - # them. + # them. 
# In this particular case, X_1 instance in 'consumed' and X_1 instance # in 'bypassed' will be the same: because they have the same source and # action name, and 'virtual-target.register' won't allow two different # instances. Therefore, it's OK to use 'set.difference'. - + bypassed = set.difference(bypassed, consumed) return (consumed, bypassed) - + def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources): """ Converts several files to consumable types. - """ + """ consumed = [] bypassed = [] + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + + assert isinstance(project, ProjectTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) # We process each source one-by-one, trying to convert it to # a usable type. for s in sources: @@ -578,12 +629,13 @@ class Generator: return (consumed, bypassed) def consume_directly (self, source): + assert isinstance(source, virtual_target.VirtualTarget) real_source_type = source.type () # If there are no source types, we can consume anything source_types = self.source_types() if not source_types: - source_types = [real_source_type] + source_types = [real_source_type] consumed = [] missing_types = [] @@ -596,9 +648,9 @@ class Generator: missing_types.append (st) return (consumed, missing_types) - + def action_class (self): - """ Returns the class to be used to actions. Default implementation + """ Returns the class to be used to actions. Default implementation returns "action". """ return virtual_target.Action @@ -607,11 +659,13 @@ class Generator: def find (id): """ Finds the generator with id. Returns None if not found. """ + assert isinstance(id, basestring) return __generators.get (id, None) def register (g): """ Registers new generator instance 'g'. """ + assert isinstance(g, Generator) id = g.id() __generators [id] = g @@ -660,6 +714,19 @@ def register (g): invalidate_extendable_viable_source_target_type_cache() +def check_register_types(fn): + def wrapper(id, source_types, target_types, requirements=[]): + assert isinstance(id, basestring) + assert is_iterable_typed(source_types, basestring) + assert is_iterable_typed(target_types, basestring) + assert is_iterable_typed(requirements, basestring) + return fn(id, source_types, target_types, requirements=requirements) + wrapper.__name__ = fn.__name__ + wrapper.__doc__ = fn.__doc__ + return wrapper + + +@check_register_types def register_standard (id, source_types, target_types, requirements = []): """ Creates new instance of the 'generator' class and registers it. Returns the creates instance. @@ -671,6 +738,8 @@ def register_standard (id, source_types, target_types, requirements = []): register (g) return g + +@check_register_types def register_composing (id, source_types, target_types, requirements = []): g = Generator (id, True, source_types, target_types, requirements) register (g) @@ -679,6 +748,7 @@ def register_composing (id, source_types, target_types, requirements = []): def generators_for_toolset (toolset): """ Returns all generators which belong to 'toolset'. 
""" + assert isinstance(toolset, basestring) return __generators_for_toolset.get(toolset, []) def override (overrider_id, overridee_id): @@ -687,26 +757,29 @@ def override (overrider_id, overridee_id): that could produce a target of certain type, both those generators are amoung viable generators, the overridden generator is immediately discarded. - + The overridden generators are discarded immediately after computing the list of viable generators, before running any of them.""" - + assert isinstance(overrider_id, basestring) + assert isinstance(overridee_id, basestring) + __overrides.setdefault(overrider_id, []).append(overridee_id) def __viable_source_types_real (target_type): """ Returns a list of source type which can possibly be converted to 'target_type' by some chain of generator invocation. - + More formally, takes all generators for 'target_type' and returns union of source types for those generators and result of calling itself recusrively on source types. """ + assert isinstance(target_type, basestring) generators = [] # 't0' is the initial list of target types we need to process to get a list # of their viable source target types. New target types will not be added to - # this list. + # this list. t0 = type.all_bases (target_type) @@ -714,14 +787,14 @@ def __viable_source_types_real (target_type): # list of their viable source target types. This list will get expanded as # we locate more target types to process. t = t0 - + result = [] while t: - # Find all generators for current type. + # Find all generators for current type. # Unlike 'find_viable_generators' we don't care about prop_set. generators = __type_to_generators.get (t [0], []) t = t[1:] - + for g in generators: if not g.source_types(): # Empty source types -- everything can be accepted @@ -729,7 +802,7 @@ def __viable_source_types_real (target_type): # This will terminate outer loop. t = None break - + for source_type in g.source_types (): if not source_type in result: # If generator accepts 'source_type' it @@ -750,13 +823,14 @@ def __viable_source_types_real (target_type): if not n in t0: t.append (n) result.append (n) - + return result def viable_source_types (target_type): """ Helper rule, caches the result of '__viable_source_types_real'. """ + assert isinstance(target_type, basestring) if not __viable_source_types_cache.has_key(target_type): __vst_cached_types.append(target_type) __viable_source_types_cache [target_type] = __viable_source_types_real (target_type) @@ -767,6 +841,7 @@ def viable_source_types_for_generator_real (generator): method of 'generator', has some change of being eventually used (probably after conversion by other generators) """ + assert isinstance(generator, Generator) source_types = generator.source_types () if not source_types: @@ -791,15 +866,24 @@ def viable_source_types_for_generator_real (generator): def viable_source_types_for_generator (generator): """ Caches the result of 'viable_source_types_for_generator'. """ + assert isinstance(generator, Generator) if not __viable_source_types_cache.has_key(generator): __vstg_cached_generators.append(generator) __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator) - + return __viable_source_types_cache[generator] def try_one_generator_really (project, name, generator, target_type, properties, sources): """ Returns usage requirements + list of created targets. 
""" + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(generator, Generator) + assert isinstance(target_type, basestring) + assert isinstance(properties, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) targets = generator.run (project, name, properties, sources) usage_requirements = [] @@ -809,7 +893,7 @@ def try_one_generator_really (project, name, generator, target_type, properties, if targets: success = True; - + if isinstance (targets[0], property_set.PropertySet): usage_requirements = targets [0] targets = targets [1] @@ -818,7 +902,7 @@ def try_one_generator_really (project, name, generator, target_type, properties, usage_requirements = property_set.empty () dout( " generator" + generator.id() + " spawned ") - # generators.dout [ indent ] " " $(targets) ; + # generators.dout [ indent ] " " $(targets) ; # if $(usage-requirements) # { # generators.dout [ indent ] " with usage requirements:" $(x) ; @@ -834,21 +918,29 @@ def try_one_generator (project, name, generator, target_type, properties, source to fail. If so, quickly returns empty list. Otherwise, calls try_one_generator_really. """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(generator, Generator) + assert isinstance(target_type, basestring) + assert isinstance(properties, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) source_types = [] for s in sources: source_types.append (s.type ()) viable_source_types = viable_source_types_for_generator (generator) - + if source_types and viable_source_types != ['*'] and\ - not set.intersection (source_types, viable_source_types): + not set_.intersection (source_types, viable_source_types): if project.manager ().logger ().on (): - id = generator.id () + id = generator.id () project.manager ().logger ().log (__name__, "generator '%s' pruned" % id) project.manager ().logger ().log (__name__, "source_types" '%s' % source_types) project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types) - + return [] else: @@ -856,10 +948,18 @@ def try_one_generator (project, name, generator, target_type, properties, source def construct_types (project, name, target_types, prop_set, sources): - + + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert is_iterable_typed(target_types, basestring) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + result = [] usage_requirements = property_set.empty() - + for t in target_types: r = construct (project, name, t, prop_set, sources) @@ -870,7 +970,7 @@ def construct_types (project, name, target_types, prop_set, sources): # TODO: have to introduce parameter controlling if # several types can be matched and add appropriate - # checks + # checks # TODO: need to review the documentation for # 'construct' to see if it should return $(source) even @@ -883,9 +983,10 @@ def construct_types (project, name, target_types, prop_set, sources): return (usage_requirements, sources) def __ensure_type (targets): - """ Ensures all 'targets' have types. If this is not so, exists with + """ Ensures all 'targets' have types. 
If this is not so, exists with error. """ + assert is_iterable_typed(targets, virtual_target.VirtualTarget) for t in targets: if not t.type (): get_manager().errors()("target '%s' has no type" % str (t)) @@ -898,24 +999,26 @@ def find_viable_generators_aux (target_type, prop_set): - for each type find all generators that generate that type and which requirements are satisfied by properties. - if the set of generators is not empty, returns that set. - + Note: this algorithm explicitly ignores generators for base classes if there's at least one generator for requested target_type. """ + assert isinstance(target_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) # Select generators that can create the required target type. viable_generators = [] initial_generators = [] - import type + from . import type # Try all-type generators first. Assume they have # quite specific requirements. all_bases = type.all_bases(target_type) - + for t in all_bases: - + initial_generators = __type_to_generators.get(t, []) - + if initial_generators: dout("there are generators for this type") if t != target_type: @@ -933,22 +1036,24 @@ def find_viable_generators_aux (target_type, prop_set): ng = g.clone_and_change_target_type(t, target_type) generators2.append(ng) register(ng) - + initial_generators = generators2 break - + for g in initial_generators: dout("trying generator " + g.id() + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")") - + m = g.match_rank(prop_set) if m: dout(" is viable") - viable_generators.append(g) - + viable_generators.append(g) + return viable_generators def find_viable_generators (target_type, prop_set): + assert isinstance(target_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) key = target_type + '.' + str (prop_set) l = __viable_generators_cache.get (key, None) @@ -971,15 +1076,15 @@ def find_viable_generators (target_type, prop_set): # Generators which override 'all'. all_overrides = [] - + # Generators which are overriden - overriden_ids = [] + overriden_ids = [] for g in viable_generators: id = g.id () - + this_overrides = __overrides.get (id, []) - + if this_overrides: overriden_ids.extend (this_overrides) if 'all' in this_overrides: @@ -989,24 +1094,31 @@ def find_viable_generators (target_type, prop_set): viable_generators = all_overrides return [g for g in viable_generators if not g.id() in overriden_ids] - + def __construct_really (project, name, target_type, prop_set, sources): """ Attempts to construct target by finding viable generators, running them and selecting the dependency graph. 
""" + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(target_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) viable_generators = find_viable_generators (target_type, prop_set) - + result = [] dout(" *** %d viable generators" % len (viable_generators)) generators_that_succeeded = [] - + for g in viable_generators: - __active_generators.append(g) + __active_generators.append(g) r = try_one_generator (project, name, g, target_type, prop_set, sources) del __active_generators[-1] - + if r: generators_that_succeeded.append(g) if result: @@ -1027,7 +1139,7 @@ def __construct_really (project, name, target_type, prop_set, sources): get_manager().errors()(output.getvalue()) else: result = r; - + return result; @@ -1036,19 +1148,26 @@ def construct (project, name, target_type, prop_set, sources, top_level=False): from 'sources'. The 'sources' are treated as a collection of *possible* ingridients -- i.e. it is not required to consume them all. If 'multiple' is true, the rule is allowed to return - several targets of 'target-type'. - + several targets of 'target-type'. + Returns a list of target. When this invocation is first instance of 'construct' in stack, returns only targets of requested 'target-type', otherwise, returns also unused sources and additionally generated targets. - + If 'top-level' is set, does not suppress generators that are already used in the stack. This may be useful in cases where a generator has to build a metatarget -- for example a target corresponding to - built tool. + built tool. """ - + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(target_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(top_level, bool) global __active_generators if top_level: saved_active = __active_generators @@ -1057,23 +1176,23 @@ def construct (project, name, target_type, prop_set, sources, top_level=False): global __construct_stack if not __construct_stack: __ensure_type (sources) - + __construct_stack.append (1) increase_indent () if project.manager().logger().on(): dout( "*** construct " + target_type) - + for s in sources: dout(" from " + str(s)) project.manager().logger().log (__name__, " properties: ", prop_set.raw ()) - + result = __construct_really(project, name, target_type, prop_set, sources) decrease_indent() - + __construct_stack = __construct_stack [1:] if top_level: @@ -1086,7 +1205,7 @@ def add_usage_requirements (result, raw_properties): if isinstance (result[0], property_set.PropertySet): return (result[0].add_raw(raw_properties), result[1]) else: - return (propery_set.create(raw-properties), result) + return (property_set.create(raw_properties), result) #if [ class.is-a $(result[1]) : property-set ] #{ # return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ; diff --git a/src/build/project.jam b/src/build/project.jam index 83d0377e4..2b3386a13 100644 --- a/src/build/project.jam +++ b/src/build/project.jam @@ -96,7 +96,7 @@ rule load-used-projects ( module-name ) # 'jamroot'. With the latter, we would get duplicate matches on Windows and # would have to eliminate duplicates. 
JAMROOT ?= [ modules.peek : JAMROOT ] ; -JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot.jam ; +JAMROOT ?= project-root.jam [Jj]amroot [Jj]amroot. [Jj]amroot.jam ; # Loads parent of Jamfile at 'location'. Issues an error if nothing is found. @@ -191,7 +191,7 @@ rule module-name ( jamfile-location ) # Default patterns to search for the Jamfiles to use for build declarations. # JAMFILE = [ modules.peek : JAMFILE ] ; -JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile.jam ; +JAMFILE ?= [Bb]uild.jam [Jj]amfile.v2 [Jj]amfile [Jj]amfile. [Jj]amfile.jam ; # Find the Jamfile at the given location. This returns the exact names of all @@ -1001,6 +1001,32 @@ rule glob-internal ( project : wildcards + : excludes * : rule-name ) } +rule glob-path-root ( root path ) +{ + return [ path.root $(path) $(root) ] ; +} + +rule glob-internal-ex ( project : paths + : wildcards + : excludes * : rule-name ) +{ + # Make the paths we search in absolute, if they aren't already absolute. + # If the given paths are relative, they will be relative to the source + # directory. So that's what we root against. + local source-location + = [ path.root [ $(project).get source-location ] [ path.pwd ] ] ; + local search-paths + = [ sequence.transform project.glob-path-root $(source-location) : $(paths) ] ; + paths + = [ path.$(rule-name) $(search-paths) : $(wildcards) : $(excludes) ] ; + # The paths we have found are absolute, but the names specified in the + # sources list are assumed to be relative to the source directory of the + # corresponding project. Make the results relative to the source again. + local result + = [ sequence.transform path.relative-to $(source-location) : $(paths) ] ; + + return $(result) ; +} + + # This module defines rules common to all projects. # module project-rules @@ -1211,6 +1237,20 @@ module project-rules $(excludes) : glob-tree ] ; } + rule glob-ex ( paths + : wildcards + : excludes * ) + { + import project ; + return [ project.glob-internal-ex [ project.current ] + : $(paths) : $(wildcards) : $(excludes) : glob ] ; + } + + rule glob-tree-ex ( paths + : wildcards + : excludes * ) + { + import project ; + return [ project.glob-internal-ex [ project.current ] + : $(paths) : $(wildcards) : $(excludes) : glob-tree ] ; + } + # Calculates conditional requirements for multiple requirements at once. # This is a shorthand to reduce duplication and to keep an inline # declarative syntax. For example: diff --git a/src/build/project.py b/src/build/project.py index 71bc33fb3..ea8fe0106 100644 --- a/src/build/project.py +++ b/src/build/project.py @@ -40,9 +40,10 @@ # their project id. import b2.util.path +import b2.build.targets from b2.build import property_set, property from b2.build.errors import ExceptionWithUserContext -import b2.build.targets +from b2.manager import get_manager import bjam import b2 @@ -56,7 +57,10 @@ import imp import traceback import b2.util.option as option -from b2.util import record_jam_to_value_mapping, qualify_jam_action +from b2.util import ( + record_jam_to_value_mapping, qualify_jam_action, is_iterable_typed, bjam_signature, + is_iterable) + class ProjectRegistry: @@ -130,6 +134,7 @@ class ProjectRegistry: file and jamfile needed by the loaded one will be loaded recursively. If the jamfile at that location is loaded already, does nothing. 
Returns the project module for the Jamfile.""" + assert isinstance(jamfile_location, basestring) absolute = os.path.join(os.getcwd(), jamfile_location) absolute = os.path.normpath(absolute) @@ -159,6 +164,7 @@ class ProjectRegistry: return mname def load_used_projects(self, module_name): + assert isinstance(module_name, basestring) # local used = [ modules.peek $(module-name) : .used-projects ] ; used = self.used_projects[module_name] @@ -172,7 +178,7 @@ class ProjectRegistry: def load_parent(self, location): """Loads parent of Jamfile at 'location'. Issues an error if nothing is found.""" - + assert isinstance(location, basestring) found = b2.util.path.glob_in_parents( location, self.JAMROOT + self.JAMFILE) @@ -187,6 +193,8 @@ class ProjectRegistry: """Given 'name' which can be project-id or plain directory name, return project module corresponding to that id or directory. Returns nothing of project is not found.""" + assert isinstance(name, basestring) + assert isinstance(current_location, basestring) project_module = None @@ -214,6 +222,7 @@ class ProjectRegistry: """Returns the name of module corresponding to 'jamfile-location'. If no module corresponds to location yet, associates default module name with that location.""" + assert isinstance(jamfile_location, basestring) module = self.location2module.get(jamfile_location) if not module: # Root the path, so that locations are always umbiguious. @@ -230,6 +239,9 @@ class ProjectRegistry: exact names of all the Jamfiles in the given directory. The optional parent-root argument causes this to search not the given directory but the ones above it up to the directory given in it.""" + assert isinstance(dir, basestring) + assert isinstance(parent_root, (int, bool)) + assert isinstance(no_errors, (int, bool)) # Glob for all the possible Jamfiles according to the match pattern. # @@ -280,6 +292,8 @@ Please consult the documentation at 'http://boost.org/boost-build2'.""" """Load a Jamfile at the given directory. Returns nothing. Will attempt to load the file as indicated by the JAMFILE patterns. Effect of calling this rule twice with the same 'dir' is underfined.""" + assert isinstance(dir, basestring) + assert isinstance(jamfile_module, basestring) # See if the Jamfile is where it should be. is_jamroot = False @@ -359,12 +373,15 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) The caller is required to never call this method twice on the same file. """ + assert isinstance(jamfile_module, basestring) + assert isinstance(file, basestring) self.used_projects[jamfile_module] = [] bjam.call("load", jamfile_module, file) self.load_used_projects(jamfile_module) def is_jamroot(self, basename): + assert isinstance(basename, basestring) match = [ pat for pat in self.JAMROOT if re.match(pat, basename)] if match: return 1 @@ -378,7 +395,9 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) location is the location (directory) of the project to initialize. 
If not specified, standalone project will be initialized """ - + assert isinstance(module_name, basestring) + assert isinstance(location, basestring) or location is None + assert isinstance(basename, basestring) or basename is None if "--debug-loading" in self.manager.argv(): print "Initializing project '%s'" % module_name @@ -465,6 +484,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) def inherit_attributes(self, project_module, parent_module): """Make 'project-module' inherit attributes of project root and parent module.""" + assert isinstance(project_module, basestring) + assert isinstance(parent_module, basestring) attributes = self.module2attributes[project_module] pattributes = self.module2attributes[parent_module] @@ -502,6 +523,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) def register_id(self, id, module): """Associate the given id with the given project module.""" + assert isinstance(id, basestring) + assert isinstance(module, basestring) self.id2module[id] = module def current(self): @@ -509,11 +532,17 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) return self.current_project def set_current(self, c): + if __debug__: + from .targets import ProjectTarget + assert isinstance(c, ProjectTarget) self.current_project = c def push_current(self, project): """Temporary changes the current project to 'project'. Should be followed by 'pop-current'.""" + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) self.saved_current_project.append(self.current_project) self.current_project = project @@ -524,11 +553,14 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) def attributes(self, project): """Returns the project-attribute instance for the specified jamfile module.""" + assert isinstance(project, basestring) return self.module2attributes[project] def attribute(self, project, attribute): """Returns the value of the specified attribute in the specified jamfile module.""" + assert isinstance(project, basestring) + assert isinstance(attribute, basestring) try: return self.module2attributes[project].get(attribute) except: @@ -537,10 +569,14 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) def attributeDefault(self, project, attribute, default): """Returns the value of the specified attribute in the specified jamfile module.""" + assert isinstance(project, basestring) + assert isinstance(attribute, basestring) + assert isinstance(default, basestring) or default is None return self.module2attributes[project].getDefault(attribute, default) def target(self, project_module): """Returns the project target corresponding to the 'project-module'.""" + assert isinstance(project_module, basestring) if not self.module2target.has_key(project_module): self.module2target[project_module] = \ b2.build.targets.ProjectTarget(project_module, project_module, @@ -550,6 +586,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) def use(self, id, location): # Use/load a project. 
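# The type checks added through this file come in two forms, both visible
# in the hunk above: a bare assert isinstance(...) for simple argument
# types, and an "if __debug__:" block when the check needs an import
# (from .targets) that would be circular at module-import time.  Both are
# disabled under "python -O", so optimised runs pay essentially nothing.
# A tiny self-contained illustration of the same pattern (the stub class
# and function name are invented for the example):
class ProjectTargetStub(object):
    pass

def set_current_sketch(state, project):
    if __debug__:
        # In project.py the deferred import sits here, inside the guard,
        # so it is only touched on debug runs.
        assert isinstance(project, ProjectTargetStub)
    state["current_project"] = project

set_current_sketch({}, ProjectTargetStub())   # passes; a wrong type raises AssertionError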
+ assert isinstance(id, basestring) + assert isinstance(location, basestring) saved_project = self.current_project project_module = self.load(location) declared_id = self.attributeDefault(project_module, "id", "") @@ -564,16 +602,24 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) self.current_module = saved_project - def add_rule(self, name, callable): + def add_rule(self, name, callable_): """Makes rule 'name' available to all subsequently loaded Jamfiles. Calling that rule wil relay to 'callable'.""" - self.project_rules_.add_rule(name, callable) + assert isinstance(name, basestring) + assert callable(callable_) + self.project_rules_.add_rule(name, callable_) def project_rules(self): return self.project_rules_ def glob_internal(self, project, wildcards, excludes, rule_name): + if __debug__: + from .targets import ProjectTarget + assert isinstance(project, ProjectTarget) + assert is_iterable_typed(wildcards, basestring) + assert is_iterable_typed(excludes, basestring) or excludes is None + assert isinstance(rule_name, basestring) location = project.get("source-location")[0] result = [] @@ -656,6 +702,8 @@ actual value %s""" % (jamfile_module, saved_project, self.current_project)) since then we might get naming conflicts between standard Python modules and those. """ + assert isinstance(name, basestring) + assert is_iterable_typed(extra_path, basestring) or extra_path is None # See if we loaded module of this name already existing = self.loaded_tool_modules_.get(name) if existing: @@ -774,7 +822,20 @@ class ProjectAttributes: def set(self, attribute, specification, exact=False): """Set the named attribute from the specification given by the user. The value actually set may be different.""" - + assert isinstance(attribute, basestring) + assert isinstance(exact, (int, bool)) + if __debug__ and not exact: + if attribute == 'requirements': + assert (isinstance(specification, property_set.PropertySet) + or all(isinstance(s, basestring) for s in specification)) + elif attribute in ( + 'usage-requirements', 'default-build', 'source-location', 'build-dir', 'id'): + assert is_iterable_typed(specification, basestring) + elif __debug__: + assert ( + isinstance(specification, (property_set.PropertySet, type(None), basestring)) + or all(isinstance(s, basestring) for s in specification) + ) if exact: self.__dict__[attribute] = specification @@ -838,9 +899,11 @@ for project at '%s'""" % (attribute, self.location)) self.__dict__[attribute] = specification def get(self, attribute): + assert isinstance(attribute, basestring) return self.__dict__[attribute] def getDefault(self, attribute, default): + assert isinstance(attribute, basestring) return self.__dict__.get(attribute, default) def dump(self): @@ -876,41 +939,51 @@ class ProjectRules: "error_reporting_wrapper", "add_rule_for_type", "reverse"]] self.all_names_ = [x for x in self.local_names] - def _import_rule(self, bjam_module, name, callable): - if hasattr(callable, "bjam_signature"): - bjam.import_rule(bjam_module, name, self.make_wrapper(callable), callable.bjam_signature) + def _import_rule(self, bjam_module, name, callable_): + assert isinstance(bjam_module, basestring) + assert isinstance(name, basestring) + assert callable(callable_) + if hasattr(callable_, "bjam_signature"): + bjam.import_rule(bjam_module, name, self.make_wrapper(callable_), callable_.bjam_signature) else: - bjam.import_rule(bjam_module, name, self.make_wrapper(callable)) + bjam.import_rule(bjam_module, name, self.make_wrapper(callable_)) def 
add_rule_for_type(self, type): + assert isinstance(type, basestring) rule_name = type.lower().replace("_", "-") - def xpto (name, sources = [], requirements = [], default_build = [], usage_requirements = []): + @bjam_signature([['name'], ['sources', '*'], ['requirements', '*'], + ['default_build', '*'], ['usage_requirements', '*']]) + def xpto (name, sources=[], requirements=[], default_build=[], usage_requirements=[]): + return self.manager_.targets().create_typed_target( - type, self.registry.current(), name[0], sources, + type, self.registry.current(), name, sources, requirements, default_build, usage_requirements) self.add_rule(rule_name, xpto) - def add_rule(self, name, callable): - self.rules[name] = callable + def add_rule(self, name, callable_): + assert isinstance(name, basestring) + assert callable(callable_) + self.rules[name] = callable_ self.all_names_.append(name) # Add new rule at global bjam scope. This might not be ideal, # added because if a jamroot does 'import foo' where foo calls # add_rule, we need to import new rule to jamroot scope, and # I'm lazy to do this now. - self._import_rule("", name, callable) + self._import_rule("", name, callable_) def all_names(self): return self.all_names_ - def call_and_report_errors(self, callable, *args, **kw): + def call_and_report_errors(self, callable_, *args, **kw): + assert callable(callable_) result = None try: self.manager_.errors().push_jamfile_context() - result = callable(*args, **kw) + result = callable_(*args, **kw) except ExceptionWithUserContext, e: e.report() except Exception, e: @@ -923,16 +996,18 @@ class ProjectRules: return result - def make_wrapper(self, callable): + def make_wrapper(self, callable_): """Given a free-standing function 'callable', return a new callable that will call 'callable' and report all exceptins, using 'call_and_report_errors'.""" + assert callable(callable_) def wrapper(*args, **kw): - return self.call_and_report_errors(callable, *args, **kw) + return self.call_and_report_errors(callable_, *args, **kw) return wrapper def init_project(self, project_module, python_standalone=False): - + assert isinstance(project_module, basestring) + assert isinstance(python_standalone, bool) if python_standalone: m = sys.modules[project_module] @@ -961,7 +1036,7 @@ class ProjectRules: self._import_rule(project_module, n, self.rules[n]) def project(self, *args): - + assert is_iterable(args) and all(is_iterable(arg) for arg in args) jamfile_module = self.registry.current().project_module() attributes = self.registry.attributes(jamfile_module) @@ -1017,7 +1092,8 @@ attribute is allowed only for top-level 'project' invocations""") """Declare and set a project global constant. Project global constants are normal variables but should not be changed. They are applied to every child Jamfile.""" - m = "Jamfile" + assert is_iterable_typed(name, basestring) + assert is_iterable_typed(value, basestring) self.registry.current().add_constant(name[0], value) def path_constant(self, name, value): @@ -1025,6 +1101,8 @@ attribute is allowed only for top-level 'project' invocations""") path is adjusted to be relative to the invocation directory. 
The given value path is taken to be either absolute, or relative to this project root.""" + assert is_iterable_typed(name, basestring) + assert is_iterable_typed(value, basestring) if len(value) > 1: self.registry.manager.error()("path constant should have one element") self.registry.current().add_constant(name[0], value[0], path=1) @@ -1032,27 +1110,35 @@ attribute is allowed only for top-level 'project' invocations""") def use_project(self, id, where): # See comment in 'load' for explanation why we record the # parameters as opposed to loading the project now. - m = self.registry.current().project_module(); + assert is_iterable_typed(id, basestring) + assert is_iterable_typed(where, basestring) + m = self.registry.current().project_module() self.registry.used_projects[m].append((id[0], where[0])) def build_project(self, dir): - assert(isinstance(dir, list)) + assert is_iterable_typed(dir, basestring) jamfile_module = self.registry.current().project_module() attributes = self.registry.attributes(jamfile_module) now = attributes.get("projects-to-build") attributes.set("projects-to-build", now + dir, exact=True) def explicit(self, target_names): + assert is_iterable_typed(target_names, basestring) self.registry.current().mark_targets_as_explicit(target_names) def always(self, target_names): + assert is_iterable_typed(target_names, basestring) self.registry.current().mark_targets_as_alays(target_names) def glob(self, wildcards, excludes=None): + assert is_iterable_typed(wildcards, basestring) + assert is_iterable_typed(excludes, basestring)or excludes is None return self.registry.glob_internal(self.registry.current(), wildcards, excludes, "glob") def glob_tree(self, wildcards, excludes=None): + assert is_iterable_typed(wildcards, basestring) + assert is_iterable_typed(excludes, basestring) or excludes is None bad = 0 for p in wildcards: if os.path.dirname(p): @@ -1076,6 +1162,7 @@ attribute is allowed only for top-level 'project' invocations""") # will expect the module to be found even though # the directory is not in BOOST_BUILD_PATH. # So temporary change the search path. 
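# Most of the list-valued checks in this file, including the glob/glob_tree
# asserts just below, go through is_iterable_typed() from b2.util.  Its
# definition is not shown in this patch; the sketch below is only a guess
# at the behaviour the call sites rely on -- an iterable (not a bare
# string) whose elements are all instances of the given type.
def is_iterable_typed_sketch(values, kind):
    return (hasattr(values, "__iter__")
            and all(isinstance(v, kind) for v in values))

# e.g. is_iterable_typed_sketch(["*.cpp", "*.h"], basestring) holds, while a
# bare "*.cpp" is rejected because it is not a list of patterns.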
+ assert is_iterable_typed(toolset, basestring) current = self.registry.current() location = current.get('location') @@ -1090,7 +1177,9 @@ attribute is allowed only for top-level 'project' invocations""") self.registry.set_current(current) def import_(self, name, names_to_import=None, local_names=None): - + assert is_iterable_typed(name, basestring) + assert is_iterable_typed(names_to_import, basestring) or names_to_import is None + assert is_iterable_typed(local_names, basestring)or local_names is None name = name[0] py_name = name if py_name == "os": @@ -1133,7 +1222,8 @@ attribute is allowed only for top-level 'project' invocations""") lib x : x.cpp : [ conditional gcc debug : DEBUG_EXCEPTION DEBUG_TRACE ] ; """ - + assert is_iterable_typed(condition, basestring) + assert is_iterable_typed(requirements, basestring) c = string.join(condition, ",") if c.find(":") != -1: return [c + r for r in requirements] @@ -1141,6 +1231,8 @@ attribute is allowed only for top-level 'project' invocations""") return [c + ":" + r for r in requirements] def option(self, name, value): + assert is_iterable(name) and isinstance(name[0], basestring) + assert is_iterable(value) and isinstance(value[0], basestring) name = name[0] if not name in ["site-config", "user-config", "project-config"]: get_manager().errors()("The 'option' rule may be used only in site-config or user-config") diff --git a/src/build/property.jam b/src/build/property.jam index ff28dfd20..78a9744b1 100644 --- a/src/build/property.jam +++ b/src/build/property.jam @@ -237,12 +237,15 @@ rule as-path ( properties * ) local components ; for local p in $(properties) { - if $(p:G) + if ! hidden in [ feature.attributes $(p:G) ] { - local f = [ utility.ungrist $(p:G) ] ; - p = $(f)-$(p:G=) ; + if $(p:G) + { + local f = [ utility.ungrist $(p:G) ] ; + p = $(f)-$(p:G=) ; + } + components += [ $(.abbrev) $(p) ] ; } - components += [ $(.abbrev) $(p) ] ; } $(entry) = $(components:J=/) ; diff --git a/src/build/property.py b/src/build/property.py index f851c9e5e..11a18ff38 100644 --- a/src/build/property.py +++ b/src/build/property.py @@ -1,17 +1,17 @@ # Status: ported, except for tests. # Base revision: 64070 # -# Copyright 2001, 2002, 2003 Dave Abrahams -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2001, 2002, 2003 Dave Abrahams +# Copyright 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import re import sys from b2.util.utility import * from b2.build import feature -from b2.util import sequence, qualify_jam_action +from b2.util import sequence, qualify_jam_action, is_iterable_typed import b2.util.set from b2.manager import get_manager @@ -41,7 +41,7 @@ class Property(object): self._feature = f self._value = value self._condition = condition - + def feature(self): return self._feature @@ -70,7 +70,9 @@ class Property(object): def create_from_string(s, allow_condition=False,allow_missing_value=False): - + assert isinstance(s, basestring) + assert isinstance(allow_condition, bool) + assert isinstance(allow_missing_value, bool) condition = [] import types if not isinstance(s, types.StringType): @@ -92,7 +94,7 @@ def create_from_string(s, allow_condition=False,allow_missing_value=False): if feature.is_implicit_value(s): f = feature.implied_feature(s) value = s - else: + else: raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s) else: if feature.valid(feature_name): @@ -119,11 +121,11 @@ def create_from_string(s, allow_condition=False,allow_missing_value=False): if condition: condition = [create_from_string(x) for x in condition.split(',')] - + return Property(f, value, condition) def create_from_strings(string_list, allow_condition=False): - + assert is_iterable_typed(string_list, basestring) return [create_from_string(s, allow_condition) for s in string_list] def reset (): @@ -153,7 +155,7 @@ def path_order (x, y): """ if x == y: return 0 - + xg = get_grist (x) yg = get_grist (y) @@ -164,10 +166,10 @@ def path_order (x, y): return 1 else: - if not xg: + if not xg: x = feature.expand_subfeatures([x]) y = feature.expand_subfeatures([y]) - + if x < y: return -1 elif x > y: @@ -176,21 +178,23 @@ def path_order (x, y): return 0 def identify(string): - return string + return string # Uses Property def refine (properties, requirements): - """ Refines 'properties' by overriding any non-free properties - for which a different value is specified in 'requirements'. + """ Refines 'properties' by overriding any non-free properties + for which a different value is specified in 'requirements'. Conditional requirements are just added without modification. Returns the resulting list of properties. """ + assert is_iterable_typed(properties, Property) + assert is_iterable_typed(requirements, Property) # The result has no duplicates, so we store it in a set result = set() - + # Records all requirements. required = {} - + # All the elements of requirements should be present in the result # Record them so that we can handle 'properties'. for r in requirements: @@ -224,14 +228,14 @@ def translate_paths (properties, path): if p.feature().path(): values = __re_two_ampersands.split(p.value()) - + new_value = "&&".join(os.path.join(path, v) for v in values) if new_value != p.value(): result.append(Property(p.feature(), new_value, p.condition())) else: result.append(p) - + else: result.append (p) @@ -242,6 +246,8 @@ def translate_indirect(properties, context_module): names of rules, used in 'context-module'. Such rules can be either local to the module or global. Qualified local rules with the name of the module.""" + assert is_iterable_typed(properties, Property) + assert isinstance(context_module, basestring) result = [] for p in properties: if p.value()[0] == '@': @@ -257,15 +263,14 @@ def validate (properties): """ Exit with error if any of the properties is not valid. 
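# A simplified model of the refine() semantics described above: non-free
# properties from 'properties' are overridden when a non-conditional
# requirement pins the same feature, free properties survive, and
# conditional requirements are added untouched.  Plain named tuples stand
# in for the real Property objects; this illustrates the rule only, not
# the b2 implementation.
from collections import namedtuple

Prop = namedtuple("Prop", "feature value free conditional")

def refine_sketch(properties, requirements):
    pinned = set(r.feature for r in requirements
                 if not r.free and not r.conditional)
    kept = [p for p in properties
            if p.free or p.conditional or p.feature not in pinned]
    return kept + list(requirements)

base = [Prop("<variant>", "debug", False, False),
        Prop("<define>", "FOO", True, False)]
reqs = [Prop("<variant>", "release", False, False)]
# refine_sketch(base, reqs) keeps the free <define>FOO and replaces
# <variant>debug with <variant>release.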
properties may be a single property or a sequence of properties. """ - - if isinstance (properties, str): - __validate1 (properties) - else: - for p in properties: - __validate1 (p) + if isinstance(properties, Property): + properties = [properties] + assert is_iterable_typed(properties, Property) + for p in properties: + __validate1(p) def expand_subfeatures_in_conditions (properties): - + assert is_iterable_typed(properties, Property) result = [] for p in properties: @@ -296,8 +301,9 @@ def split_conditional (property): debug,gcc full. Otherwise, returns empty string. """ + assert isinstance(property, basestring) m = __re_split_conditional.match (property) - + if m: return (m.group (1), '<' + m.group (2)) @@ -307,14 +313,18 @@ def split_conditional (property): def select (features, properties): """ Selects properties which correspond to any of the given features. """ + assert is_iterable_typed(properties, basestring) result = [] - + # add any missing angle brackets features = add_grist (features) return [p for p in properties if get_grist(p) in features] def validate_property_sets (sets): + if __debug__: + from .property_set import PropertySet + assert is_iterable_typed(sets, PropertySet) for s in sets: validate(s.all()) @@ -323,6 +333,10 @@ def evaluate_conditionals_in_context (properties, context): For those with met conditions, removes the condition. Properies in conditions are looked up in 'context' """ + if __debug__: + from .property_set import PropertySet + assert is_iterable_typed(properties, Property) + assert isinstance(context, PropertySet) base = [] conditional = [] @@ -348,8 +362,11 @@ def change (properties, feature, value = None): given feature replaced by the given value. If 'value' is None the feature will be removed. """ + assert is_iterable_typed(properties, basestring) + assert isinstance(feature, basestring) + assert isinstance(value, (basestring, type(None))) result = [] - + feature = add_grist (feature) for p in properties: @@ -368,7 +385,8 @@ def change (properties, feature, value = None): def __validate1 (property): """ Exit with error if property is not valid. - """ + """ + assert isinstance(property, Property) msg = None if not property.feature().free(): @@ -379,7 +397,7 @@ def __validate1 (property): # Still to port. 
# Original lines are prefixed with "# " # -# +# # import utility : ungrist ; # import sequence : unique ; # import errors : error ; @@ -389,8 +407,8 @@ def __validate1 (property): # import set ; # import path ; # import assert ; -# -# +# +# # rule validate-property-sets ( property-sets * ) @@ -405,7 +423,10 @@ def __validate1 (property): def remove(attributes, properties): """Returns a property sets which include all the elements in 'properties' that do not have attributes listed in 'attributes'.""" - + if isinstance(attributes, basestring): + attributes = [attributes] + assert is_iterable_typed(attributes, basestring) + assert is_iterable_typed(properties, basestring) result = [] for e in properties: attributes_new = feature.attributes(get_grist(e)) @@ -424,6 +445,8 @@ def remove(attributes, properties): def take(attributes, properties): """Returns a property set which include all properties in 'properties' that have any of 'attributes'.""" + assert is_iterable_typed(attributes, basestring) + assert is_iterable_typed(properties, basestring) result = [] for e in properties: if b2.util.set.intersection(attributes, feature.attributes(get_grist(e))): @@ -431,7 +454,9 @@ def take(attributes, properties): return result def translate_dependencies(properties, project_id, location): - + assert is_iterable_typed(properties, Property) + assert isinstance(project_id, basestring) + assert isinstance(location, basestring) result = [] for p in properties: @@ -447,10 +472,10 @@ def translate_dependencies(properties, project_id, location): pass else: rooted = os.path.join(os.getcwd(), location, rooted) - + result.append(Property(p.feature(), rooted + "//" + m.group(2), p.condition())) - - elif os.path.isabs(v): + + elif os.path.isabs(v): result.append(p) else: result.append(Property(p.feature(), project_id + "//" + v, p.condition())) @@ -464,10 +489,12 @@ class PropertyMap: def __init__ (self): self.__properties = [] self.__values = [] - + def insert (self, properties, value): """ Associate value with properties. """ + assert is_iterable_typed(properties, basestring) + assert isinstance(value, basestring) self.__properties.append(properties) self.__values.append(value) @@ -477,15 +504,18 @@ class PropertyMap: subset has value assigned to it, return the value for the longest subset, if it's unique. 
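# A compact model of the PropertyMap lookup documented below: among the
# stored keys that are subsets of the queried property list, the single
# longest key wins, and an equal-length tie is ambiguous (the real code
# raises NoBestMatchingAlternative).  Lists of strings stand in for
# property lists, and the values are made-up examples.
def property_map_get_sketch(entries, properties):
    query = frozenset(properties)
    matches = [(len(key), value) for key, value in entries
               if frozenset(key) <= query]
    if not matches:
        return None
    best = max(rank for rank, _ in matches)
    winners = [value for rank, value in matches if rank == best]
    if len(winners) > 1:
        raise ValueError("no best matching alternative")
    return winners[0]

entries = [(["<toolset>gcc"], "g++"),
           (["<toolset>gcc", "<variant>debug"], "g++ -g")]
# Querying with ["<toolset>gcc", "<variant>debug", "<link>static"] matches
# both keys and returns "g++ -g", the value of the longer one.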
""" + assert is_iterable_typed(properties, basestring) return self.find_replace (properties) def find_replace(self, properties, value=None): + assert is_iterable_typed(properties, basestring) + assert isinstance(value, (basestring, type(None))) matches = [] match_ranks = [] - + for i in range(0, len(self.__properties)): p = self.__properties[i] - + if b2.util.set.contains (p, properties): matches.append (i) match_ranks.append(len(p)) @@ -499,7 +529,7 @@ class PropertyMap: raise NoBestMatchingAlternative () best = best [0] - + original = self.__values[best] if value: @@ -512,12 +542,12 @@ class PropertyMap: # import errors : try catch ; # import feature ; # import feature : feature subfeature compose ; -# +# # # local rules must be explicitly re-imported # import property : path-order ; -# +# # feature.prepare-test property-test-temp ; -# +# # feature toolset : gcc : implicit symmetric ; # subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4 # 3.0 3.0.1 3.0.2 : optional ; @@ -526,98 +556,98 @@ class PropertyMap: # feature optimization : on off ; # feature variant : debug release : implicit composite symmetric ; # feature rtti : on off : link-incompatible ; -# +# # compose debug : _DEBUG off ; # compose release : NDEBUG on ; -# +# # import assert ; # import "class" : new ; -# +# # validate gcc gcc-3.0.1 : $(test-space) ; -# +# # assert.result gcc off FOO # : refine gcc off # : FOO # : $(test-space) # ; -# +# # assert.result gcc on # : refine gcc off # : on # : $(test-space) # ; -# +# # assert.result gcc off # : refine gcc : off : $(test-space) # ; -# +# # assert.result gcc off off:FOO -# : refine gcc : off off:FOO +# : refine gcc : off off:FOO # : $(test-space) # ; -# -# assert.result gcc:foo gcc:bar -# : refine gcc:foo : gcc:bar +# +# assert.result gcc:foo gcc:bar +# : refine gcc:foo : gcc:bar # : $(test-space) # ; -# +# # assert.result MY_RELEASE -# : evaluate-conditionals-in-context +# : evaluate-conditionals-in-context # release,off:MY_RELEASE # : gcc release off -# +# # ; -# +# # try ; # validate value : $(test-space) ; # catch "Invalid property 'value': unknown feature 'feature'." ; -# +# # try ; # validate default : $(test-space) ; # catch \"default\" is not a known value of feature ; -# +# # validate WHATEVER : $(test-space) ; -# +# # try ; # validate : $(test-space) ; # catch "Invalid property '': No value specified for feature 'rtti'." 
; -# +# # try ; # validate value : $(test-space) ; # catch "value" is not a value of an implicit feature ; -# -# -# assert.result on +# +# +# assert.result on # : remove free implicit : gcc foo on : $(test-space) ; -# -# assert.result a +# +# assert.result a # : select include : a gcc ; -# -# assert.result a +# +# assert.result a # : select include bar : a gcc ; -# +# # assert.result a gcc # : select include : a gcc ; -# -# assert.result kylix a +# +# assert.result kylix a # : change gcc a : kylix ; -# -# # Test ordinary properties -# assert.result -# : split-conditional gcc +# +# # Test ordinary properties +# assert.result +# : split-conditional gcc # ; -# +# # # Test properties with ":" # assert.result # : split-conditional FOO=A::B # ; -# +# # # Test conditional feature # assert.result gcc,3.0 FOO # : split-conditional gcc,3.0:FOO # ; -# +# # feature.finish-test property-test-temp ; # } -# - +# + diff --git a/src/build/property_set.py b/src/build/property_set.py index 37fe46631..494a5b1b7 100644 --- a/src/build/property_set.py +++ b/src/build/property_set.py @@ -8,6 +8,7 @@ import hashlib +import bjam from b2.util.utility import * import property, feature import b2.build.feature @@ -15,7 +16,7 @@ from b2.exceptions import * from b2.build.property import get_abbreviated_paths from b2.util.sequence import unique from b2.util.set import difference -from b2.util import cached, abbreviate_dashed +from b2.util import cached, abbreviate_dashed, is_iterable_typed from b2.manager import get_manager @@ -36,6 +37,8 @@ def create (raw_properties = []): """ Creates a new 'PropertySet' instance for the given raw properties, or returns an already existing one. """ + assert (is_iterable_typed(raw_properties, property.Property) + or is_iterable_typed(raw_properties, basestring)) # FIXME: propagate to callers. if len(raw_properties) > 0 and isinstance(raw_properties[0], property.Property): x = raw_properties @@ -58,6 +61,7 @@ def create_with_validation (raw_properties): that all properties are valid and converting implicit properties into gristed form. """ + assert is_iterable_typed(raw_properties, basestring) properties = [property.create_from_string(s) for s in raw_properties] property.validate(properties) @@ -71,7 +75,9 @@ def empty (): def create_from_user_input(raw_properties, jamfile_module, location): """Creates a property-set from the input given by the user, in the context of 'jamfile-module' at 'location'""" - + assert is_iterable_typed(raw_properties, basestring) + assert isinstance(jamfile_module, basestring) + assert isinstance(location, basestring) properties = property.create_from_strings(raw_properties, True) properties = property.translate_paths(properties, location) properties = property.translate_indirect(properties, jamfile_module) @@ -95,7 +101,10 @@ def refine_from_user_input(parent_requirements, specification, jamfile_module, - project-module -- the module to which context indirect features will be bound. - location -- the path to which path features are relative.""" - + assert isinstance(parent_requirements, PropertySet) + assert is_iterable_typed(specification, basestring) + assert isinstance(jamfile_module, basestring) + assert isinstance(location, basestring) if not specification: return parent_requirements @@ -146,7 +155,7 @@ class PropertySet: caching whenever possible. 
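# create() above returns one shared PropertySet per distinct set of raw
# properties, so equal property lists map to the same cached instance and
# per-instance caches stay valid.  A minimal interning sketch; the tuple
# key and the stand-in "instance" are simplifications, not the actual
# cache layout.
_property_set_cache = {}

def create_sketch(raw_properties):
    key = tuple(sorted(set(raw_properties)))
    if key not in _property_set_cache:
        _property_set_cache[key] = key    # a real PropertySet would be built here
    return _property_set_cache[key]

a = create_sketch(["<variant>debug", "<toolset>gcc"])
b = create_sketch(["<toolset>gcc", "<variant>debug"])
assert a is b    # same properties, same cached object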
""" def __init__ (self, properties = []): - + assert is_iterable_typed(properties, property.Property) raw_properties = [] for p in properties: @@ -304,6 +313,7 @@ class PropertySet: return self.subfeatures_ def evaluate_conditionals(self, context=None): + assert isinstance(context, (PropertySet, type(None))) if not context: context = self @@ -410,6 +420,7 @@ class PropertySet: """ Creates a new property set containing the properties in this one, plus the ones of the property set passed as argument. """ + assert isinstance(ps, PropertySet) if not self.added_.has_key(ps): self.added_[ps] = create(self.all_ + ps.all()) return self.added_[ps] @@ -428,6 +439,7 @@ class PropertySet: feature = feature[0] if not isinstance(feature, b2.build.feature.Feature): feature = b2.build.feature.get(feature) + assert isinstance(feature, b2.build.feature.Feature) if not self.feature_map_: self.feature_map_ = {} @@ -442,9 +454,9 @@ class PropertySet: @cached def get_properties(self, feature): """Returns all contained properties associated with 'feature'""" - if not isinstance(feature, b2.build.feature.Feature): feature = b2.build.feature.get(feature) + assert isinstance(feature, b2.build.feature.Feature) result = [] for p in self.all_: @@ -454,7 +466,7 @@ class PropertySet: def __contains__(self, item): return item in self.all_set_ - + def hash(p): m = hashlib.md5() m.update(p) diff --git a/src/build/scanner.py b/src/build/scanner.py index 19f1431d4..ada5d8325 100644 --- a/src/build/scanner.py +++ b/src/build/scanner.py @@ -1,10 +1,10 @@ # Status: ported. # Base revision: 45462 -# -# Copyright 2003 Dave Abrahams -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# +# Copyright 2003 Dave Abrahams +# Copyright 2002, 2003, 2004, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # Implements scanners: objects that compute implicit dependencies for # files, such as includes in C++. @@ -19,10 +19,10 @@ # then associated with actual targets. It is possible to use # several scanners for a virtual-target. For example, a single source # might be used by to compile actions, with different include paths. -# In this case, two different actual targets will be created, each +# In this case, two different actual targets will be created, each # having scanner of its own. # -# Typically, scanners are created from target type and action's +# Typically, scanners are created from target type and action's # properties, using the rule 'get' in this module. Directly creating # scanners is not recommended, because it might create many equvivalent # but different instances, and lead in unneeded duplication of @@ -34,6 +34,8 @@ import bjam import os from b2.exceptions import * from b2.manager import get_manager +from b2.util import is_iterable_typed + def reset (): """ Clear the module state. This is mainly for testing purposes. @@ -42,33 +44,37 @@ def reset (): # Maps registered scanner classes to relevant properties __scanners = {} - + # A cache of scanners. 
- # The key is: class_name.properties_tag, where properties_tag is the concatenation + # The key is: class_name.properties_tag, where properties_tag is the concatenation # of all relevant properties, separated by '-' __scanner_cache = {} - + reset () def register(scanner_class, relevant_properties): - """ Registers a new generator class, specifying a set of + """ Registers a new generator class, specifying a set of properties relevant to this scanner. Ctor for that class should have one parameter: list of properties. """ + assert issubclass(scanner_class, Scanner) + assert isinstance(relevant_properties, basestring) __scanners[str(scanner_class)] = relevant_properties def registered(scanner_class): """ Returns true iff a scanner of that class is registered """ return __scanners.has_key(str(scanner_class)) - + def get(scanner_class, properties): """ Returns an instance of previously registered scanner with the specified properties. """ + assert issubclass(scanner_class, Scanner) + assert is_iterable_typed(properties, basestring) scanner_name = str(scanner_class) - + if not registered(scanner_name): raise BaseException ("attempt to get unregisted scanner: %s" % scanner_name) @@ -76,18 +82,18 @@ def get(scanner_class, properties): r = property.select(relevant_properties, properties) scanner_id = scanner_name + '.' + '-'.join(r) - - if not __scanner_cache.has_key(scanner_name): - __scanner_cache[scanner_name] = scanner_class(r) - return __scanner_cache[scanner_name] + if not __scanner_cache.has_key(scanner_id): + __scanner_cache[scanner_id] = scanner_class(r) + + return __scanner_cache[scanner_id] class Scanner: """ Base scanner class. """ def __init__ (self): pass - + def pattern (self): """ Returns a pattern to use for scanning. """ @@ -120,16 +126,19 @@ class CommonScanner(Scanner): get_manager().scanners().propagate(self, matches) class ScannerRegistry: - + def __init__ (self, manager): self.manager_ = manager self.count_ = 0 self.exported_scanners_ = {} def install (self, scanner, target, vtarget): - """ Installs the specified scanner on actual target 'target'. + """ Installs the specified scanner on actual target 'target'. vtarget: virtual target from which 'target' was actualized. """ + assert isinstance(scanner, Scanner) + assert isinstance(target, basestring) + assert isinstance(vtarget, basestring) engine = self.manager_.engine() engine.set_target_variable(target, "HDRSCAN", scanner.pattern()) if not self.exported_scanners_.has_key(scanner): @@ -141,8 +150,8 @@ class ScannerRegistry: exported_name = self.exported_scanners_[scanner] engine.set_target_variable(target, "HDRRULE", exported_name) - - # scanner reflects difference in properties affecting + + # scanner reflects difference in properties affecting # binding of 'target', which will be known when processing # includes for it, will give information on how to # interpret quoted includes. @@ -150,6 +159,8 @@ class ScannerRegistry: pass def propagate(self, scanner, targets): + assert isinstance(scanner, Scanner) + assert is_iterable_typed(targets, basestring) or isinstance(targets, basestring) engine = self.manager_.engine() engine.set_target_variable(targets, "HDRSCAN", scanner.pattern()) engine.set_target_variable(targets, "HDRRULE", diff --git a/src/build/targets.jam b/src/build/targets.jam index 44c8fc9e4..2cfe08e05 100644 --- a/src/build/targets.jam +++ b/src/build/targets.jam @@ -336,6 +336,23 @@ class project-target : abstract-target created. : in project [ full-name ] ; } self.alternatives += $(target-instance) ; + if ! 
( [ $(target-instance).name ] in $(self.alternative-names) ) + { + self.alternative-names += [ $(target-instance).name ] ; + } + } + + # Checks if an alternative was declared for the target. + # Unlike checking for a main target this does not require + # building the main targets. And hence can be used in/directly + # while loading a project. + # + rule has-alternative-for-target ( target-name ) + { + if $(target-name) in $(self.alternative-names) + { + return 1 ; + } } # Returns a 'main-target' class instance corresponding to 'name'. diff --git a/src/build/targets.py b/src/build/targets.py index acf10e4fd..043d90666 100644 --- a/src/build/targets.py +++ b/src/build/targets.py @@ -10,10 +10,10 @@ # Supports 'abstract' targets, which are targets explicitly defined in Jamfile. # -# Abstract targets are represented by classes derived from 'AbstractTarget' class. +# Abstract targets are represented by classes derived from 'AbstractTarget' class. # The first abstract target is 'project_target', which is created for each # Jamfile, and can be obtained by the 'target' rule in the Jamfile's module. -# (see project.jam). +# (see project.jam). # # Project targets keep a list of 'MainTarget' instances. # A main target is what the user explicitly defines in a Jamfile. It is @@ -36,34 +36,34 @@ # |AbstractTarget | # +========================+ # |name | -# |project | -# | | -# |generate(properties) = 0| -# +-----------+------------+ -# | -# ^ -# / \ -# +-+-+ -# | -# | -# +------------------------+------+------------------------------+ -# | | | -# | | | -# +----------+-----------+ +------+------+ +------+-------+ -# | project_target | | MainTarget | | BasicTarget | -# +======================+ 1 * +=============+ alternatives +==============+ -# | generate(properties) |o-----------+ generate |<>------------->| generate | +# |project | +# | | +# |generate(properties) = 0| +# +-----------+------------+ +# | +# ^ +# / \ +# +-+-+ +# | +# | +# +------------------------+------+------------------------------+ +# | | | +# | | | +# +----------+-----------+ +------+------+ +------+-------+ +# | project_target | | MainTarget | | BasicTarget | +# +======================+ 1 * +=============+ alternatives +==============+ +# | generate(properties) |o-----------+ generate |<>------------->| generate | # | main-target | +-------------+ | construct = 0| -# +----------------------+ +--------------+ -# | -# ^ -# / \ -# +-+-+ -# | -# | -# ...--+----------------+------------------+----------------+---+ -# | | | | -# | | | | +# +----------------------+ +--------------+ +# | +# ^ +# / \ +# +-+-+ +# | +# | +# ...--+----------------+------------------+----------------+---+ +# | | | | +# | | | | # ... ---+-----+ +------+-------+ +------+------+ +--------+-----+ # | | TypedTarget | | make-target | | stage-target | # . +==============+ +=============+ +==============+ @@ -81,7 +81,7 @@ import property, project, virtual_target, property_set, feature, generators, too from virtual_target import Subvariant from b2.exceptions import * from b2.util.sequence import unique -from b2.util import path, bjam_signature +from b2.util import path, bjam_signature, safe_isinstance, is_iterable_typed from b2.build.errors import user_error_checkpoint import b2.build.build_request as build_request @@ -90,7 +90,7 @@ import b2.util.set _re_separate_target_from_properties = re.compile (r'^([^<]*)(/(<.*))?$') class TargetRegistry: - + def __init__ (self): # All targets that are currently being built. 
# Only the key is id (target), the value is the actual object. @@ -107,6 +107,7 @@ class TargetRegistry: """ Registers the specified target as a main target alternatives. Returns 'target'. """ + assert isinstance(target, AbstractTarget) target.project ().add_alternative (target) return target @@ -116,12 +117,15 @@ class TargetRegistry: as main target instances, and the name of such targets are adjusted to be '__'. Such renaming is disabled is non-empty value is passed for 'no-renaming' parameter.""" + assert is_iterable_typed(sources, basestring) + assert isinstance(main_target_name, basestring) + assert isinstance(no_renaming, (int, bool)) result = [] for t in sources: t = b2.util.jam_to_value_maybe(t) - + if isinstance (t, AbstractTarget): name = t.name () @@ -131,7 +135,7 @@ class TargetRegistry: # Inline targets are not built by default. p = t.project() - p.mark_targets_as_explicit([name]) + p.mark_targets_as_explicit([name]) result.append(name) else: @@ -145,11 +149,12 @@ class TargetRegistry: which are obtained by - translating all specified property paths, and - refining project requirements with the one specified for the target - + 'specification' are the properties xplicitly specified for a main target 'project' is the project where the main taret is to be declared.""" - + assert is_iterable_typed(specification, basestring) + assert isinstance(project, ProjectTarget) specification.extend(toolset.requirements()) requirements = property_set.refine_from_user_input( @@ -166,6 +171,8 @@ class TargetRegistry: specification: Use-properties explicitly specified for a main target project: Project where the main target is to be declared """ + assert is_iterable_typed(specification, basestring) + assert isinstance(project, ProjectTarget) project_usage_requirements = project.get ('usage-requirements') # We don't use 'refine-from-user-input' because I'm not sure if: @@ -174,7 +181,7 @@ class TargetRegistry: # are always free. usage_requirements = property_set.create_from_user_input( specification, project.project_module(), project.get("location")) - + return project_usage_requirements.add (usage_requirements) def main_target_default_build (self, specification, project): @@ -184,6 +191,8 @@ class TargetRegistry: specification: Default build explicitly specified for a main target project: Project where the main target is to be declared """ + assert is_iterable_typed(specification, basestring) + assert isinstance(project, ProjectTarget) if specification: return property_set.create_with_validation(specification) else: @@ -192,16 +201,18 @@ class TargetRegistry: def start_building (self, main_target_instance): """ Helper rules to detect cycles in main target references. """ + assert isinstance(main_target_instance, MainTarget) if self.targets_being_built_.has_key(id(main_target_instance)): names = [] for t in self.targets_being_built_.values() + [main_target_instance]: names.append (t.full_name()) - + get_manager().errors()("Recursion in main target references\n") - + self.targets_being_built_[id(main_target_instance)] = main_target_instance def end_building (self, main_target_instance): + assert isinstance(main_target_instance, MainTarget) assert (self.targets_being_built_.has_key (id (main_target_instance))) del self.targets_being_built_ [id (main_target_instance)] @@ -211,6 +222,11 @@ class TargetRegistry: 'usage_requirements' are assumed to be in the form specified by the user in Jamfile corresponding to 'project'. 
""" + assert isinstance(type, basestring) + assert isinstance(project, ProjectTarget) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) return self.main_target_alternative (TypedTarget (name, project, type, self.main_target_sources (sources, name), self.main_target_requirements (requirements, project), @@ -231,6 +247,7 @@ class TargetRegistry: print self.indent_ + message def push_target(self, target): + assert isinstance(target, AbstractTarget) self.targets_.append(target) def pop_target(self): @@ -241,14 +258,15 @@ class TargetRegistry: class GenerateResult: - + def __init__ (self, ur=None, targets=None): if not targets: targets = [] - + assert isinstance(ur, property_set.PropertySet) or ur is None + assert is_iterable_typed(targets, virtual_target.VirtualTarget) + self.__usage_requirements = ur self.__targets = targets - assert all(isinstance(t, virtual_target.VirtualTarget) for t in targets) if not self.__usage_requirements: self.__usage_requirements = property_set.empty () @@ -258,10 +276,10 @@ class GenerateResult: def targets (self): return self.__targets - + def extend (self, other): assert (isinstance (other, GenerateResult)) - + self.__usage_requirements = self.__usage_requirements.add (other.usage_requirements ()) self.__targets.extend (other.targets ()) @@ -274,12 +292,13 @@ class AbstractTarget: project: the project target to which this one belongs manager:the manager object. If none, uses project.manager () """ + assert isinstance(name, basestring) assert (isinstance (project, ProjectTarget)) # Note: it might seem that we don't need either name or project at all. # However, there are places where we really need it. One example is error # messages which should name problematic targets. Another is setting correct # paths for sources and generated files. - + # Why allow manager to be specified? Because otherwise project target could not derive # from this class. if manager: @@ -288,47 +307,48 @@ class AbstractTarget: self.manager_ = project.manager () self.name_ = name - self.project_ = project - + self.project_ = project + def manager (self): return self.manager_ - + def name (self): """ Returns the name of this target. """ return self.name_ - + def project (self): """ Returns the project for this target. """ return self.project_ - + def location (self): """ Return the location where the target was declared. """ return self.location_ - + def full_name (self): """ Returns a user-readable name for this target. """ location = self.project ().get ('location') return location + '/' + self.name_ - + def generate (self, property_set): """ Takes a property set. Generates virtual targets for this abstract target, using the specified properties, unless a different value of some - feature is required by the target. + feature is required by the target. On success, returns a GenerateResult instance with: - a property_set with the usage requirements to be - applied to dependents + applied to dependents - a list of produced virtual targets, which may be - empty. + empty. If 'property_set' is empty, performs default build of this target, in a way specific to derived class. 
""" raise BaseException ("method should be defined in derived classes") - + def rename (self, new_name): + assert isinstance(new_name, basestring) self.name_ = new_name class ProjectTarget (AbstractTarget): @@ -346,28 +366,32 @@ class ProjectTarget (AbstractTarget): all alternatives are enumerated an main targets are created. """ def __init__ (self, manager, name, project_module, parent_project, requirements, default_build): + assert isinstance(project_module, basestring) + assert isinstance(parent_project, (ProjectTarget, type(None))) + assert isinstance(requirements, (type(None), property_set.PropertySet)) + assert isinstance(default_build, (type(None), property_set.PropertySet)) AbstractTarget.__init__ (self, name, self, manager) - + self.project_module_ = project_module self.location_ = manager.projects().attribute (project_module, 'location') self.requirements_ = requirements self.default_build_ = default_build - + self.build_dir_ = None - + # A cache of IDs self.ids_cache_ = {} - + # True is main targets have already been built. self.built_main_targets_ = False - + # A list of the registered alternatives for this project. self.alternatives_ = [] # A map from main target name to the target corresponding # to it. self.main_target_ = {} - + # Targets marked as explicit. self.explicit_targets_ = set() @@ -388,8 +412,9 @@ class ProjectTarget (AbstractTarget): # way to make 'make' work without this method. def project_module (self): return self.project_module_ - + def get (self, attribute): + assert isinstance(attribute, basestring) return self.manager().projects().attribute( self.project_module_, attribute) @@ -404,16 +429,17 @@ class ProjectTarget (AbstractTarget): def generate (self, ps): """ Generates all possible targets contained in this project. """ + assert isinstance(ps, property_set.PropertySet) self.manager_.targets().log( "Building project '%s' with '%s'" % (self.name (), str(ps))) self.manager_.targets().increase_indent () - + result = GenerateResult () - + for t in self.targets_to_build (): g = t.generate (ps) result.extend (g) - + self.manager_.targets().decrease_indent () return result @@ -422,10 +448,10 @@ class ProjectTarget (AbstractTarget): must be built when this project is built. """ result = [] - + if not self.built_main_targets_: self.build_main_targets () - + # Collect all main targets here, except for "explicit" ones. for n, t in self.main_target_.iteritems (): if not t.name () in self.explicit_targets_: @@ -435,29 +461,33 @@ class ProjectTarget (AbstractTarget): self_location = self.get ('location') for pn in self.get ('projects-to-build'): result.append (self.find(pn + "/")) - + return result def mark_targets_as_explicit (self, target_names): """Add 'target' to the list of targets in this project that should be build only by explicit request.""" - + # Record the name of the target, not instance, since this # rule is called before main target instaces are created. + assert is_iterable_typed(target_names, basestring) self.explicit_targets_.update(target_names) def mark_targets_as_always(self, target_names): + assert is_iterable_typed(target_names, basestring) self.always_targets_.update(target_names) - + def add_alternative (self, target_instance): """ Add new target alternative. 
""" + assert isinstance(target_instance, AbstractTarget) if self.built_main_targets_: raise IllegalOperation ("add-alternative called when main targets are already created for project '%s'" % self.full_name ()) self.alternatives_.append (target_instance) def main_target (self, name): + assert isinstance(name, basestring) if not self.built_main_targets_: self.build_main_targets() @@ -465,17 +495,19 @@ class ProjectTarget (AbstractTarget): def has_main_target (self, name): """Tells if a main target with the specified name exists.""" + assert isinstance(name, basestring) if not self.built_main_targets_: self.build_main_targets() return self.main_target_.has_key(name) - + def create_main_target (self, name): """ Returns a 'MainTarget' class instance corresponding to the 'name'. """ + assert isinstance(name, basestring) if not self.built_main_targets_: self.build_main_targets () - + return self.main_targets_.get (name, None) @@ -483,7 +515,9 @@ class ProjectTarget (AbstractTarget): """ Find and return the target with the specified id, treated relative to self. """ - result = None + assert isinstance(id, basestring) + + result = None current_location = self.get ('location') __re_split_project_target = re.compile (r'(.*)//(.*)') @@ -497,13 +531,13 @@ class ProjectTarget (AbstractTarget): target_part = split.group (2) project_registry = self.project_.manager ().projects () - + extra_error_message = '' if project_part: # There's explicit project part in id. Looks up the # project and pass the request to it. pm = project_registry.find (project_part, current_location) - + if pm: project_target = project_registry.target (pm) result = project_target.find (target_part, no_error=1) @@ -520,7 +554,7 @@ class ProjectTarget (AbstractTarget): # # After first build we'll have target 'test' in Jamfile and file # 'test' on the disk. We need target to override the file. - + result = None if self.has_main_target(id): result = self.main_target(id) @@ -531,19 +565,21 @@ class ProjectTarget (AbstractTarget): # File actually does not exist. # Reset 'target' so that an error is issued. result = None - + if not result: # Interpret id as project-id project_module = project_registry.find (id, current_location) if project_module: result = project_registry.target (project_module) - + return result def find (self, id, no_error = False): + assert isinstance(id, basestring) + assert isinstance(no_error, int) # also matches bools v = self.ids_cache_.get (id, None) - + if not v: v = self.find_really (id) self.ids_cache_ [id] = v @@ -553,10 +589,10 @@ class ProjectTarget (AbstractTarget): raise BaseException ("Unable to find file or target named '%s'\nreferred from project at '%s'" % (id, self.get ('location'))) - + def build_main_targets (self): self.built_main_targets_ = True - + for a in self.alternatives_: name = a.name () if not self.main_target_.has_key (name): @@ -565,7 +601,7 @@ class ProjectTarget (AbstractTarget): if name in self.always_targets_: a.always() - + self.main_target_ [name].add_alternative (a) def add_constant(self, name, value, path=0): @@ -576,17 +612,19 @@ class ProjectTarget (AbstractTarget): the constant will be interpreted relatively to the location of project. """ - + assert isinstance(name, basestring) + assert isinstance(value, basestring) + assert isinstance(path, int) # will also match bools if path: l = self.location_ if not l: - # Project corresponding to config files do not have + # Project corresponding to config files do not have # 'location' attribute, but do have source location. 
# It might be more reasonable to make every project have # a location and use some other approach to prevent buildable # targets in config files, but that's for later. - l = get('source-location') - + l = self.get('source-location') + value = os.path.join(l, value) # Now make the value absolute path. Constants should be in # platform-native form. @@ -596,12 +634,13 @@ class ProjectTarget (AbstractTarget): bjam.call("set-variable", self.project_module(), name, value) def inherit(self, parent_project): + assert isinstance(parent_project, ProjectTarget) for c in parent_project.constants_: # No need to pass the type. Path constants were converted to # absolute paths already by parent. self.add_constant(c, parent_project.constants_[c]) - - # Import rules from parent + + # Import rules from parent this_module = self.project_module() parent_module = parent_project.project_module() @@ -612,20 +651,21 @@ class ProjectTarget (AbstractTarget): if x not in self.manager().projects().project_rules().all_names()] if user_rules: bjam.call("import-rules-from-parent", parent_module, this_module, user_rules) - + class MainTarget (AbstractTarget): """ A named top-level target in Jamfile. """ def __init__ (self, name, project): - AbstractTarget.__init__ (self, name, project) + AbstractTarget.__init__ (self, name, project) self.alternatives_ = [] self.default_build_ = property_set.empty () - + def add_alternative (self, target): """ Add a new alternative for this target. """ + assert isinstance(target, AbstractTarget) d = target.default_build () - + if self.alternatives_ and self.default_build_ != d: get_manager().errors()("default build must be identical in all alternatives\n" "main target is '%s'\n" @@ -637,7 +677,7 @@ class MainTarget (AbstractTarget): self.alternatives_.append (target) - def __select_alternatives (self, property_set, debug): + def __select_alternatives (self, property_set_, debug): """ Returns the best viable alternative for this property_set See the documentation for selection rules. # TODO: shouldn't this be 'alternative' (singular)? @@ -647,14 +687,17 @@ class MainTarget (AbstractTarget): # lib l : l.cpp : debug ; # lib l : l_opt.cpp : release ; # won't work unless we add default value debug. - property_set = property_set.add_defaults () - + assert isinstance(property_set_, property_set.PropertySet) + assert isinstance(debug, int) # also matches bools + + property_set_ = property_set_.add_defaults () + # The algorithm: we keep the current best viable alternative. # When we've got new best viable alternative, we compare it - # with the current one. + # with the current one. 
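# A schematic version of the selection loop that follows: every alternative
# reports which of its requirement properties matched the build request
# (or None when it is not viable), the best viable one is kept, and an
# unresolved tie is treated as ambiguous here.  This is only the shape of
# the algorithm; the precise comparison lives in the code below.
def select_alternative_sketch(alternatives):
    # alternatives: list of (alternative, matched) pairs, where 'matched'
    # is the list of requirement properties that matched, or None.
    viable = [(len(m), a) for a, m in alternatives if m is not None]
    if not viable:
        return None
    best_rank = max(rank for rank, _ in viable)
    best = [a for rank, a in viable if rank == best_rank]
    if len(best) > 1:
        raise ValueError("ambiguous alternatives for this property set")
    return best[0]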
best = None best_properties = None - + if len (self.alternatives_) == 0: return None @@ -662,11 +705,11 @@ class MainTarget (AbstractTarget): return self.alternatives_ [0] if debug: - print "Property set for selection:", property_set + print "Property set for selection:", property_set_ for v in self.alternatives_: - properties = v.match (property_set, debug) - + properties = v.match (property_set_, debug) + if properties is not None: if not best: best = v @@ -689,8 +732,9 @@ class MainTarget (AbstractTarget): return best - def apply_default_build (self, property_set): - return apply_default_build(property_set, self.default_build_) + def apply_default_build (self, property_set_): + assert isinstance(property_set_, property_set.PropertySet) + return apply_default_build(property_set_, self.default_build_) def generate (self, ps): """ Select an alternative for this main target, by finding all alternatives @@ -698,23 +742,24 @@ class MainTarget (AbstractTarget): longest requirements set. Returns the result of calling 'generate' on that alternative. """ + assert isinstance(ps, property_set.PropertySet) self.manager_.targets ().start_building (self) # We want composite properties in build request act as if # all the properties it expands too are explicitly specified. ps = ps.expand () - + all_property_sets = self.apply_default_build (ps) result = GenerateResult () - + for p in all_property_sets: result.extend (self.__generate_really (p)) self.manager_.targets ().end_building (self) return result - + def __generate_really (self, prop_set): """ Generates the main target with the given property set and returns a list which first element is property_set object @@ -722,6 +767,7 @@ class MainTarget (AbstractTarget): generated virtual target in other elements. It's possible that no targets are generated. """ + assert isinstance(prop_set, property_set.PropertySet) best_alternative = self.__select_alternatives (prop_set, debug=0) if not best_alternative: @@ -732,24 +778,25 @@ class MainTarget (AbstractTarget): % (self.full_name(),)) result = best_alternative.generate (prop_set) - + # Now return virtual targets for the only alternative return result - + def rename(self, new_name): + assert isinstance(new_name, basestring) AbstractTarget.rename(self, new_name) for a in self.alternatives_: a.rename(new_name) class FileReference (AbstractTarget): """ Abstract target which refers to a source file. - This is artificial creature; it's usefull so that sources to + This is artificial creature; it's usefull so that sources to a target can be represented as list of abstract target instances. """ def __init__ (self, manager, file, project): AbstractTarget.__init__ (self, file, project) self.file_location_ = None - + def generate (self, properties): return GenerateResult (None, [ self.manager_.virtual_targets ().from_file ( @@ -767,7 +814,7 @@ class FileReference (AbstractTarget): # Returns the location of target. Needed by 'testing.jam' if not self.file_location_: source_location = self.project_.get('source-location') - + for src_dir in source_location: location = os.path.join(src_dir, self.name()) if os.path.isfile(location): @@ -783,24 +830,26 @@ def resolve_reference(target_reference, project): as properties explicitly specified for this reference. 
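# The reference syntax resolved below is a target id optionally followed by
# "/<feature>value..." overrides; the module-level regex
# ^([^<]*)(/(<.*))?$ used by resolve_reference puts the id in group(1) and
# the override string in group(3).  A quick standalone check of that split:
import re

_split = re.compile(r'^([^<]*)(/(<.*))?$')

m = _split.match("lib-a/<link>static")
assert m.group(1) == "lib-a" and m.group(3) == "<link>static"

m = _split.match("../util//assert")          # plain reference, no override
assert m.group(1) == "../util//assert" and m.group(3) is None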
""" # Separate target name from properties override + assert isinstance(target_reference, basestring) + assert isinstance(project, ProjectTarget) split = _re_separate_target_from_properties.match (target_reference) if not split: raise BaseException ("Invalid reference: '%s'" % target_reference) - + id = split.group (1) - + sproperties = [] - + if split.group (3): sproperties = property.create_from_strings(feature.split(split.group(3))) sproperties = feature.expand_composites(sproperties) - + # Find the target target = project.find (id) - + return (target, property_set.create(sproperties)) -def generate_from_reference(target_reference, project, property_set): +def generate_from_reference(target_reference, project, property_set_): """ Attempts to generate the target given by target reference, which can refer both to a main target or to a file. Returns a list consisting of @@ -810,13 +859,16 @@ def generate_from_reference(target_reference, project, property_set): project: Project where the reference is made property_set: Properties of the main target that makes the reference """ + assert isinstance(target_reference, basestring) + assert isinstance(project, ProjectTarget) + assert isinstance(property_set_, property_set.PropertySet) target, sproperties = resolve_reference(target_reference, project) - + # Take properties which should be propagated and refine them # with source-specific requirements. - propagated = property_set.propagated() + propagated = property_set_.propagated() rproperties = propagated.refine(sproperties) - + return target.generate(rproperties) @@ -828,14 +880,18 @@ class BasicTarget (AbstractTarget): targets. """ def __init__ (self, name, project, sources, requirements = None, default_build = None, usage_requirements = None): + assert is_iterable_typed(sources, basestring) + assert isinstance(requirements, property_set.PropertySet) + assert isinstance(default_build, property_set.PropertySet) + assert isinstance(usage_requirements, property_set.PropertySet) AbstractTarget.__init__ (self, name, project) - + for s in sources: if get_grist (s): raise InvalidSource ("property '%s' found in the 'sources' parameter for '%s'" % (s, name)) - + self.sources_ = sources - + if not requirements: requirements = property_set.empty () self.requirements_ = requirements @@ -844,13 +900,13 @@ class BasicTarget (AbstractTarget): if not usage_requirements: usage_requirements = property_set.empty () self.usage_requirements_ = usage_requirements - + # A cache for resolved references self.source_targets_ = None - + # A cache for generated targets self.generated_ = {} - + # A cache for build requests self.request_cache = {} @@ -865,12 +921,12 @@ class BasicTarget (AbstractTarget): def always(self): self.always_ = True - + def sources (self): """ Returns the list of AbstractTargets which are used as sources. The extra properties specified for sources are not represented. The only used of this rule at the moment is the '--dump-tests' - feature of the test system. + feature of the test system. """ if self.source_targets_ == None: self.source_targets_ = [] @@ -881,7 +937,7 @@ class BasicTarget (AbstractTarget): def requirements (self): return self.requirements_ - + def default_build (self): return self.default_build_ @@ -892,8 +948,10 @@ class BasicTarget (AbstractTarget): """ # For optimization, we add free unconditional requirements directly, # without using complex algorithsm. - # This gives the complex algorithm better chance of caching results. 
+ # This gives the complex algorithm better chance of caching results. # The exact effect of this "optimization" is no longer clear + assert isinstance(build_request, property_set.PropertySet) + assert isinstance(requirements, property_set.PropertySet) free_unconditional = [] other = [] for p in requirements.all(): @@ -902,7 +960,7 @@ class BasicTarget (AbstractTarget): else: other.append(p) other = property_set.create(other) - + key = (build_request, other) if not self.request_cache.has_key(key): self.request_cache[key] = self.__common_properties2 (build_request, other) @@ -910,8 +968,8 @@ class BasicTarget (AbstractTarget): return self.request_cache[key].add_raw(free_unconditional) # Given 'context' -- a set of already present properties, and 'requirements', - # decide which extra properties should be applied to 'context'. - # For conditional requirements, this means evaluating condition. For + # decide which extra properties should be applied to 'context'. + # For conditional requirements, this means evaluating condition. For # indirect conditional requirements, this means calling a rule. Ordinary # requirements are always applied. # @@ -920,20 +978,23 @@ class BasicTarget (AbstractTarget): # # gcc:release release:RELEASE # - # If 'what' is 'refined' returns context refined with new requirements. + # If 'what' is 'refined' returns context refined with new requirements. # If 'what' is 'added' returns just the requirements that must be applied. def evaluate_requirements(self, requirements, context, what): - # Apply non-conditional requirements. - # It's possible that that further conditional requirement change + # Apply non-conditional requirements. + # It's possible that that further conditional requirement change # a value set by non-conditional requirements. For example: # # exe a : a.cpp : single foo:multi ; - # + # # I'm not sure if this should be an error, or not, especially given that # - # single + # single # # might come from project's requirements. + assert isinstance(requirements, property_set.PropertySet) + assert isinstance(context, property_set.PropertySet) + assert isinstance(what, basestring) unconditional = feature.expand(requirements.non_conditional()) context = context.refine(property_set.create(unconditional)) @@ -941,7 +1002,7 @@ class BasicTarget (AbstractTarget): # We've collected properties that surely must be present in common # properties. We now try to figure out what other properties # should be added in order to satisfy rules (4)-(6) from the docs. - + conditionals = property_set.create(requirements.conditional()) # It's supposed that #conditionals iterations @@ -949,34 +1010,38 @@ class BasicTarget (AbstractTarget): # direction. max_iterations = len(conditionals.all()) +\ len(requirements.get("")) + 1 - + added_requirements = [] current = context - + # It's assumed that ordinary conditional requirements can't add # properties, and that rules referred - # by properties can't add new + # by properties can't add new # properties. So the list of indirect conditionals # does not change. indirect = requirements.get("") - + ok = 0 for i in range(0, max_iterations): e = conditionals.evaluate_conditionals(current).all()[:] - + # Evaluate indirect conditionals. for i in indirect: + new = None i = b2.util.jam_to_value_maybe(i) if callable(i): # This is Python callable, yeah. - e.extend(i(current)) + new = i(current) else: # Name of bjam function. Because bjam is unable to handle # list of Property, pass list of strings. 
br = b2.util.call_jam_function(i[1:], [str(p) for p in current.all()]) if br: - e.extend(property.create_from_strings(br)) + new = property.create_from_strings(br) + if new: + new = property.translate_paths(new, self.project().location()) + e.extend(new) if e == added_requirements: # If we got the same result, we've found final properties. @@ -994,7 +1059,7 @@ class BasicTarget (AbstractTarget): self.manager().errors()("Can't evaluate conditional properties " + str(conditionals)) - + if what == "added": return property_set.create(unconditional + added_requirements) elif what == "refined": @@ -1009,57 +1074,62 @@ class BasicTarget (AbstractTarget): # and expands to bar2, but default value of is not bar2, # in which case it's not clear what to do. # + assert isinstance(build_request, property_set.PropertySet) + assert isinstance(requirements, property_set.PropertySet) build_request = build_request.add_defaults() # Featured added by 'add-default' can be composite and expand # to features without default values -- so they are not added yet. # It could be clearer/faster to expand only newly added properties # but that's not critical. build_request = build_request.expand() - + return self.evaluate_requirements(requirements, build_request, "refined") - - def match (self, property_set, debug): + + def match (self, property_set_, debug): """ Returns the alternative condition for this alternative, if the condition is satisfied by 'property_set'. """ # The condition is composed of all base non-conditional properties. # It's not clear if we should expand 'self.requirements_' or not. # For one thing, it would be nice to be able to put - # msvc-6.0 + # msvc-6.0 # in requirements. - # On the other hand, if we have release in condition it + # On the other hand, if we have release in condition it # does not make sense to require full to be in # build request just to select this variant. + assert isinstance(property_set_, property_set.PropertySet) bcondition = self.requirements_.base () ccondition = self.requirements_.conditional () condition = b2.util.set.difference (bcondition, ccondition) if debug: print " next alternative: required properties:", [str(p) for p in condition] - - if b2.util.set.contains (condition, property_set.all()): + + if b2.util.set.contains (condition, property_set_.all()): if debug: print " matched" - + return condition else: return None - def generate_dependency_targets (self, target_ids, property_set): + def generate_dependency_targets (self, target_ids, property_set_): + assert is_iterable_typed(target_ids, basestring) + assert isinstance(property_set_, property_set.PropertySet) targets = [] usage_requirements = [] for id in target_ids: - - result = generate_from_reference(id, self.project_, property_set) + + result = generate_from_reference(id, self.project_, property_set_) targets += result.targets() usage_requirements += result.usage_requirements().all() - return (targets, usage_requirements) - + return (targets, usage_requirements) + def generate_dependency_properties(self, properties, ps): """ Takes a target reference, which might be either target id or a dependency property, and generates that target using @@ -1067,20 +1137,22 @@ class BasicTarget (AbstractTarget): Returns a tuple (result, usage_requirements). 
""" + assert is_iterable_typed(properties, property.Property) + assert isinstance(ps, property_set.PropertySet) result_properties = [] usage_requirements = [] for p in properties: - + result = generate_from_reference(p.value(), self.project_, ps) for t in result.targets(): result_properties.append(property.Property(p.feature(), t)) - + usage_requirements += result.usage_requirements().all() - return (result_properties, usage_requirements) + return (result_properties, usage_requirements) + - @user_error_checkpoint @@ -1089,9 +1161,10 @@ class BasicTarget (AbstractTarget): and calls 'construct'. This method should not be overridden. """ + assert isinstance(ps, property_set.PropertySet) self.manager_.errors().push_user_context( "Generating target " + self.full_name(), self.user_context_) - + if self.manager().targets().logging(): self.manager().targets().log( "Building target '%s'" % self.name_) @@ -1100,26 +1173,26 @@ class BasicTarget (AbstractTarget): "Build request: '%s'" % str (ps.raw ())) cf = self.manager().command_line_free_features() self.manager().targets().log( - "Command line free features: '%s'" % str (cf.raw ())) + "Command line free features: '%s'" % str (cf.raw ())) self.manager().targets().log( "Target requirements: %s'" % str (self.requirements().raw ())) - + self.manager().targets().push_target(self) if not self.generated_.has_key(ps): # Apply free features form the command line. If user - # said + # said # define=FOO # he most likely want this define to be set for all compiles. - ps = ps.refine(self.manager().command_line_free_features()) + ps = ps.refine(self.manager().command_line_free_features()) rproperties = self.common_properties (ps, self.requirements_) self.manager().targets().log( "Common properties are '%s'" % str (rproperties)) - + if rproperties.get("") != ["no"]: - + result = GenerateResult () properties = rproperties.non_dependency () @@ -1142,9 +1215,9 @@ class BasicTarget (AbstractTarget): self.manager_.targets().log( "Build properties: '%s'" % str(rproperties)) - + source_targets += rproperties.get('') - + # We might get duplicate sources, for example if # we link to two library which have the same in # usage requirements. @@ -1170,7 +1243,7 @@ class BasicTarget (AbstractTarget): self.manager().virtual_targets().recent_targets(), ps, source_targets, rproperties, usage_requirements) self.manager().virtual_targets().clear_recent_targets() - + ur = self.compute_usage_requirements (s) ur = ur.add (gur) s.set_usage_requirements (ur) @@ -1178,7 +1251,7 @@ class BasicTarget (AbstractTarget): self.manager_.targets().log ( "Usage requirements from '%s' are '%s'" % (self.name(), str(rproperties))) - + self.generated_[ps] = GenerateResult (ur, result) else: self.generated_[ps] = GenerateResult (property_set.empty(), []) @@ -1195,7 +1268,7 @@ class BasicTarget (AbstractTarget): # dependencies # - it's not clear if that's a good idea anyway. The alias # target, for example, should not fail to build if a dependency - # fails. + # fails. self.generated_[ps] = GenerateResult( property_set.create(["no"]), []) else: @@ -1205,21 +1278,22 @@ class BasicTarget (AbstractTarget): self.manager().targets().decrease_indent() return self.generated_[ps] - + def compute_usage_requirements (self, subvariant): - """ Given the set of generated targets, and refined build + """ Given the set of generated targets, and refined build properties, determines and sets appripriate usage requirements on those targets. 
""" + assert isinstance(subvariant, virtual_target.Subvariant) rproperties = subvariant.build_properties () xusage_requirements =self.evaluate_requirements( self.usage_requirements_, rproperties, "added") - + # We generate all dependency properties and add them, # as well as their usage requirements, to result. (r1, r2) = self.generate_dependency_properties(xusage_requirements.dependency (), rproperties) extra = r1 + r2 - + result = property_set.create (xusage_requirements.non_dependency () + extra) # Propagate usage requirements we've got from sources, except @@ -1230,7 +1304,7 @@ class BasicTarget (AbstractTarget): # # pch pch1 : ... # lib lib1 : ..... pch1 ; - # pch pch2 : + # pch pch2 : # lib lib2 : pch2 lib1 ; # # Here, lib2 should not get property from pch1. @@ -1241,7 +1315,7 @@ class BasicTarget (AbstractTarget): # features are special. removed_pch = filter(lambda prop: prop.feature().name() not in ['', ''], subvariant.sources_usage_requirements().all()) result = result.add(property_set.PropertySet(removed_pch)) - + return result def create_subvariant (self, root_targets, all_targets, @@ -1249,23 +1323,29 @@ class BasicTarget (AbstractTarget): rproperties, usage_requirements): """Creates a new subvariant-dg instances for 'targets' - 'root-targets' the virtual targets will be returned to dependents - - 'all-targets' all virtual + - 'all-targets' all virtual targets created while building this main target - 'build-request' is property-set instance with requested build properties""" - + assert is_iterable_typed(root_targets, virtual_target.VirtualTarget) + assert is_iterable_typed(all_targets, virtual_target.VirtualTarget) + assert isinstance(build_request, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(rproperties, property_set.PropertySet) + assert isinstance(usage_requirements, property_set.PropertySet) + for e in root_targets: e.root (True) s = Subvariant (self, build_request, sources, rproperties, usage_requirements, all_targets) - + for v in all_targets: if not v.creating_subvariant(): v.creating_subvariant(s) - + return s - + def construct (self, name, source_targets, properties): """ Constructs the virtual targets for this abstract targets and the dependecy graph. Returns a tuple consisting of the properties and the list of virtual targets. 
@@ -1276,21 +1356,24 @@ class BasicTarget (AbstractTarget): class TypedTarget (BasicTarget): import generators - + def __init__ (self, name, project, type, sources, requirements, default_build, usage_requirements): + assert isinstance(type, basestring) BasicTarget.__init__ (self, name, project, sources, requirements, default_build, usage_requirements) self.type_ = type def __jam_repr__(self): return b2.util.value_to_jam(self) - + def type (self): return self.type_ - - def construct (self, name, source_targets, prop_set): + def construct (self, name, source_targets, prop_set): + assert isinstance(name, basestring) + assert is_iterable_typed(source_targets, virtual_target.VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) r = generators.construct (self.project_, os.path.splitext(name)[0], - self.type_, + self.type_, prop_set.add_raw(['' + self.type_]), source_targets, True) @@ -1303,17 +1386,19 @@ class TypedTarget (BasicTarget): print "error: and the requested properties" print "error: make sure you've configured the needed tools" print "See http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html" - + print "To debug this problem, try the --debug-generators option." sys.exit(1) - + return r -def apply_default_build(property_set, default_build): +def apply_default_build(property_set_, default_build): # 1. First, see what properties from default_build - # are already present in property_set. + # are already present in property_set. + assert isinstance(property_set_, property_set.PropertySet) + assert isinstance(default_build, property_set.PropertySet) - specified_features = set(p.feature() for p in property_set.all()) + specified_features = set(p.feature() for p in property_set_.all()) defaults_to_apply = [] for d in default_build.all(): @@ -1341,24 +1426,29 @@ def apply_default_build(property_set, default_build): # be an indication that # build_request.expand-no-defaults is the wrong rule # to use here. 
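The selection step in apply_default_build above amounts to applying a default-build property only when the build request leaves that feature unset. A standalone sketch of that filtering, using plain (feature, value) pairs rather than b2 Property objects:

# Standalone illustration of the selection step; properties are plain
# (feature, value) tuples here, not b2 Property instances.
def defaults_to_apply(request_properties, default_build_properties):
    specified = set(feature for feature, _ in request_properties)
    # Keep only defaults whose feature the request does not pin explicitly.
    return [(feature, value)
            for feature, value in default_build_properties
            if feature not in specified]

request = [('toolset', 'gcc')]
default_build = [('variant', 'debug'), ('toolset', 'msvc')]
assert defaults_to_apply(request, default_build) == [('variant', 'debug')]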
- compressed = feature.compress_subproperties(property_set.all()) + compressed = feature.compress_subproperties(property_set_.all()) result = build_request.expand_no_defaults( b2.build.property_set.create(feature.expand([p])) for p in (compressed + defaults_to_apply)) else: - result.append (property_set) + result.append (property_set_) return result def create_typed_metatarget(name, type, sources, requirements, default_build, usage_requirements): - + assert isinstance(name, basestring) + assert isinstance(type, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) + assert is_iterable_typed(usage_requirements, basestring) + from b2.manager import get_manager t = get_manager().targets() - + project = get_manager().projects().current() - + return t.main_target_alternative( TypedTarget(name, project, type, t.main_target_sources(sources, name), @@ -1368,17 +1458,22 @@ def create_typed_metatarget(name, type, sources, requirements, default_build, us def create_metatarget(klass, name, sources, requirements=[], default_build=[], usage_requirements=[]): + assert isinstance(name, basestring) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) + assert is_iterable_typed(usage_requirements, basestring) from b2.manager import get_manager t = get_manager().targets() - + project = get_manager().projects().current() - + return t.main_target_alternative( klass(name, project, t.main_target_sources(sources, name), t.main_target_requirements(requirements, project), t.main_target_default_build(default_build, project), - t.main_target_usage_requirements(usage_requirements, project))) + t.main_target_usage_requirements(usage_requirements, project))) def metatarget_function_for_class(class_): @@ -1390,7 +1485,7 @@ def metatarget_function_for_class(class_): t = get_manager().targets() project = get_manager().projects().current() - + return t.main_target_alternative( class_(name, project, t.main_target_sources(sources, name), diff --git a/src/build/toolset.py b/src/build/toolset.py index e969123d4..672d18f5a 100644 --- a/src/build/toolset.py +++ b/src/build/toolset.py @@ -1,18 +1,20 @@ # Status: being ported by Vladimir Prus # Base revision: 40958 # -# Copyright 2003 Dave Abrahams -# Copyright 2005 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003 Dave Abrahams +# Copyright 2005 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) """ Support for toolset definition. """ import feature, property, generators, property_set import b2.util.set -from b2.util import cached, qualify_jam_action +import bjam + +from b2.util import cached, qualify_jam_action, is_iterable_typed, is_iterable from b2.util.utility import * from b2.util import bjam_signature from b2.manager import get_manager @@ -22,7 +24,7 @@ __re_two_ampersands = re.compile ('(&&)') __re_first_segment = re.compile ('([^.]*).*') __re_first_group = re.compile (r'[^.]*\.(.*)') -# Flag is a mechanism to set a value +# Flag is a mechanism to set a value # A single toolset flag. 
Specifies that when certain # properties are in build property set, certain values # should be appended to some variable. @@ -30,13 +32,18 @@ __re_first_group = re.compile (r'[^.]*\.(.*)') # A flag applies to a specific action in specific module. # The list of all flags for a module is stored, and each # flag further contains the name of the rule it applies -# for, +# for, class Flag: def __init__(self, variable_name, values, condition, rule = None): + assert isinstance(variable_name, basestring) + assert is_iterable(values) and all( + isinstance(v, (basestring, type(None))) for v in values) + assert is_iterable_typed(condition, property_set.PropertySet) + assert isinstance(rule, (basestring, type(None))) self.variable_name = variable_name self.values = values - self.condition = condition + self.condition = condition self.rule = rule def __str__(self): @@ -47,7 +54,7 @@ def reset (): """ Clear the module state. This is mainly for testing purposes. """ global __module_flags, __flags, __stv - + # Mapping from module name to a list of all flags that apply # to either that module directly, or to any rule in that module. # Each element of the list is Flag instance. @@ -61,21 +68,21 @@ def reset (): # entries for module name 'xxx', they are flags for 'xxx' itself, # not including any rules in that module. __flags = {} - + # A cache for varaible settings. The key is generated from the rule name and the properties. __stv = {} - + reset () # FIXME: --ignore-toolset-requirements def using(toolset_module, *args): loaded_toolset_module= get_manager().projects().load_module(toolset_module, [os.getcwd()]); loaded_toolset_module.init(*args) - + # FIXME push-checking-for-flags-module .... # FIXME: investigate existing uses of 'hack-hack' parameter # in jam code. - + @bjam_signature((["rule_or_module", "variable_name", "condition", "*"], ["values", "*"])) def flags(rule_or_module, variable_name, condition, values = []): @@ -84,7 +91,7 @@ def flags(rule_or_module, variable_name, condition, values = []): rule_or_module: If contains dot, should be a rule name. The flags will be applied when that rule is used to set up build actions. - + If does not contain dot, should be a module name. The flags will be applied for all rules in that module. @@ -92,7 +99,7 @@ def flags(rule_or_module, variable_name, condition, values = []): module, an error is issued. variable_name: Variable that should be set on target - + condition A condition when this flag should be applied. Should be set of property sets. If one of those property sets is contained in build @@ -102,21 +109,25 @@ def flags(rule_or_module, variable_name, condition, values = []): "gcc". Subfeatures, like in "gcc-3.2" are allowed. If left empty, the flag will always used. - - Propery sets may use value-less properties - ('' vs. 'value') to match absent + + Propery sets may use value-less properties + ('' vs. 'value') to match absent properties. This allows to separately match - + /64 ia64/ - + Where both features are optional. Without this syntax we'd be forced to define "default" value. values: The value to add to variable. If - is specified, then the value of 'feature' + is specified, then the value of 'feature' will be added. """ + assert isinstance(rule_or_module, basestring) + assert isinstance(variable_name, basestring) + assert is_iterable_typed(condition, basestring) + assert is_iterable(values) and all(isinstance(v, (basestring, type(None))) for v in values) caller = bjam.caller() if not '.' 
in rule_or_module and caller and caller[:-1].startswith("Jamfile"): # Unqualified rule name, used inside Jamfile. Most likely used with @@ -129,17 +140,17 @@ def flags(rule_or_module, variable_name, condition, values = []): # FIXME: revive checking that we don't set flags for a different # module unintentionally pass - + if condition and not replace_grist (condition, ''): # We have condition in the form '', that is, without # value. That's a previous syntax: # # flags gcc.link RPATH ; # for compatibility, convert it to - # flags gcc.link RPATH : ; + # flags gcc.link RPATH : ; values = [ condition ] condition = None - + if condition: transformed = [] for c in condition: @@ -150,14 +161,17 @@ def flags(rule_or_module, variable_name, condition, values = []): condition = transformed property.validate_property_sets(condition) - + __add_flag (rule_or_module, variable_name, condition, values) def set_target_variables (manager, rule_or_module, targets, ps): """ """ + assert isinstance(rule_or_module, basestring) + assert is_iterable_typed(targets, basestring) + assert isinstance(ps, property_set.PropertySet) settings = __set_target_variables_aux(manager, rule_or_module, ps) - + if settings: for s in settings: for target in targets: @@ -166,7 +180,8 @@ def set_target_variables (manager, rule_or_module, targets, ps): def find_satisfied_condition(conditions, ps): """Returns the first element of 'property-sets' which is a subset of 'properties', or an empty list if no such element exists.""" - + assert is_iterable_typed(conditions, property_set.PropertySet) + assert isinstance(ps, property_set.PropertySet) features = set(p.feature() for p in ps.all()) for condition in conditions: @@ -177,11 +192,11 @@ def find_satisfied_condition(conditions, ps): found = False if i.value(): found = i.value() in ps.get(i.feature()) - else: - # Handle value-less properties like '' (compare with + else: + # Handle value-less properties like '' (compare with # 'x86'). - # If $(i) is a value-less property it should match default - # value of an optional property. See the first line in the + # If $(i) is a value-less property it should match default + # value of an optional property. See the first line in the # example below: # # property set properties result @@ -197,22 +212,27 @@ def find_satisfied_condition(conditions, ps): return condition return None - + def register (toolset): """ Registers a new toolset. """ + assert isinstance(toolset, basestring) feature.extend('toolset', [toolset]) def inherit_generators (toolset, properties, base, generators_to_ignore = []): + assert isinstance(toolset, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(base, basestring) + assert is_iterable_typed(generators_to_ignore, basestring) if not properties: properties = [replace_grist (toolset, '')] - + base_generators = generators.generators_for_toolset(base) - + for g in base_generators: id = g.id() - + if not id in generators_to_ignore: # Some generator names have multiple periods in their name, so # $(id:B=$(toolset)) doesn't generate the right new_id name. @@ -232,13 +252,16 @@ def inherit_flags(toolset, base, prohibited_properties = []): 'prohibited-properties' are ignored. Don't confuse property and feature, for example on and off, so blocking one of them does not block the other one. - + The flag conditions are not altered at all, so if a condition includes a name, or version of a base toolset, it won't ever match the inheriting toolset. 
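find_satisfied_condition above returns the first flag condition whose properties are all present in the build property set, with a value-less property matching only when that feature is absent, i.e. left at its optional default. A rough standalone illustration, with plain dicts standing in for PropertySet objects:

# Rough sketch; conditions and the build request are feature->value dicts
# rather than b2 PropertySet objects.
def first_satisfied_condition(conditions, build_properties):
    for condition in conditions:
        satisfied = True
        for feature, value in condition.items():
            if value:
                # Ordinary property: the requested value must be present.
                satisfied = build_properties.get(feature) == value
            else:
                # Value-less property matches only when the feature is absent.
                satisfied = feature not in build_properties
            if not satisfied:
                break
        if satisfied:
            return condition
    return None

conditions = [{'toolset': 'gcc', 'variant': 'release'}, {'toolset': 'gcc'}]
assert first_satisfied_condition(conditions, {'toolset': 'gcc'}) == {'toolset': 'gcc'}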
When such flag settings must be inherited, define a rule in base toolset module and call it as needed.""" + assert isinstance(toolset, basestring) + assert isinstance(base, basestring) + assert is_iterable_typed(prohibited_properties, basestring) for f in __module_flags.get(base, []): - + if not f.condition or b2.util.set.difference(f.condition, prohibited_properties): match = __re_first_group.match(f.rule) rule_ = None @@ -254,38 +277,20 @@ def inherit_flags(toolset, base, prohibited_properties = []): __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values) -def inherit_rules (toolset, base): - pass - # FIXME: do something about this. -# base_generators = generators.generators_for_toolset (base) -# import action +def inherit_rules(toolset, base): + engine = get_manager().engine() + new_actions = {} + for action_name, action in engine.actions.iteritems(): + module, id = split_action_id(action_name) + if module == base: + new_action_name = toolset + '.' + id + # make sure not to override any existing actions + # that may have been declared already + if new_action_name not in engine.actions: + new_actions[new_action_name] = action -# ids = [] -# for g in base_generators: -# (old_toolset, id) = split_action_id (g.id ()) -# ids.append (id) ; - -# new_actions = [] - -# engine = get_manager().engine() - # FIXME: do this! -# for action in engine.action.values(): -# pass -# (old_toolset, id) = split_action_id(action.action_name) -# -# if old_toolset == base: -# new_actions.append ((id, value [0], value [1])) -# -# for a in new_actions: -# action.register (toolset + '.' + a [0], a [1], a [2]) - - # TODO: how to deal with this? -# IMPORT $(base) : $(rules) : $(toolset) : $(rules) : localized ; -# # Import the rules to the global scope -# IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ; -# } -# + engine.actions.update(new_actions) ###################################################################################### # Private functions @@ -294,12 +299,14 @@ def inherit_rules (toolset, base): def __set_target_variables_aux (manager, rule_or_module, ps): """ Given a rule name and a property set, returns a list of tuples of variables names and values, which must be set on targets for that - rule/properties combination. + rule/properties combination. """ + assert isinstance(rule_or_module, basestring) + assert isinstance(ps, property_set.PropertySet) result = [] for f in __flags.get(rule_or_module, []): - + if not f.condition or find_satisfied_condition (f.condition, ps): processed = [] for v in f.values: @@ -309,10 +316,10 @@ def __set_target_variables_aux (manager, rule_or_module, ps): for r in processed: result.append ((f.variable_name, r)) - + # strip away last dot separated part and recurse. next = __re_split_last_segment.match(rule_or_module) - + if next: result.extend(__set_target_variables_aux( manager, next.group(1), ps)) @@ -320,12 +327,14 @@ def __set_target_variables_aux (manager, rule_or_module, ps): return result def __handle_flag_value (manager, value, ps): + assert isinstance(value, basestring) + assert isinstance(ps, property_set.PropertySet) result = [] - + if get_grist (value): f = feature.get(value) values = ps.get(f) - + for value in values: if f.dependency(): @@ -334,7 +343,7 @@ def __handle_flag_value (manager, value, ps): result.append(value.actualize()) elif f.path() or f.free(): - + # Treat features with && in the value # specially -- each &&-separated element is considered # separate value. 
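The rewritten inherit_rules above scans the engine's registered actions, re-registers every action belonging to the base toolset under the inheriting toolset's name, and skips names the derived toolset already declares. A standalone sketch of that renaming pass, where a plain dict stands in for engine.actions and splitting on the first dot stands in for split_action_id; the action names are examples only:

# Standalone sketch; 'actions' plays the role of engine.actions.
def inherit_actions(actions, toolset, base):
    new_actions = {}
    for action_name, action in actions.items():
        module, rule = action_name.split('.', 1)
        if module == base:
            new_name = toolset + '.' + rule
            # Never override an action the derived toolset already declares.
            if new_name not in actions:
                new_actions[new_name] = action
    actions.update(new_actions)

actions = {'gcc.link': object(), 'gcc.compile.c++': object()}
inherit_actions(actions, 'mytoolset', 'gcc')
assert 'mytoolset.link' in actions and 'mytoolset.compile.c++' in actions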
This is needed to handle searched @@ -355,8 +364,13 @@ def __add_flag (rule_or_module, variable_name, condition, values): """ Adds a new flag setting with the specified values. Does no checking. """ + assert isinstance(rule_or_module, basestring) + assert isinstance(variable_name, basestring) + assert is_iterable_typed(condition, property_set.PropertySet) + assert is_iterable(values) and all( + isinstance(v, (basestring, type(None))) for v in values) f = Flag(variable_name, values, condition, rule_or_module) - + # Grab the name of the module m = __re_first_segment.match (rule_or_module) assert m @@ -377,21 +391,24 @@ def add_requirements(requirements): will be automatically added to the requirements for all main targets, as if they were specified literally. For best results, all requirements added should be conditional or indirect conditional.""" - + assert is_iterable_typed(requirements, basestring) + #if ! $(.ignore-requirements) #{ __requirements.extend(requirements) #} - + # Make toolset 'toolset', defined in a module of the same name, # inherit from 'base' # 1. The 'init' rule from 'base' is imported into 'toolset' with full # name. Another 'init' is called, which forwards to the base one. -# 2. All generators from 'base' are cloned. The ids are adjusted and +# 2. All generators from 'base' are cloned. The ids are adjusted and # property in requires is adjusted too # 3. All flags are inherited # 4. All rules are imported. def inherit(toolset, base): + assert isinstance(toolset, basestring) + assert isinstance(base, basestring) get_manager().projects().load_module(base, []); inherit_generators(toolset, [], base) diff --git a/src/build/type.py b/src/build/type.py index e815739f4..c8d6334c7 100644 --- a/src/build/type.py +++ b/src/build/type.py @@ -14,7 +14,7 @@ import os.path from b2.util.utility import replace_grist, os_name from b2.exceptions import * from b2.build import feature, property, scanner -from b2.util import bjam_signature +from b2.util import bjam_signature, is_iterable_typed __re_hyphen = re.compile ('-') @@ -32,17 +32,17 @@ def __register_features (): def reset (): """ Clear the module state. This is mainly for testing purposes. Note that this must be called _after_ resetting the module 'feature'. - """ + """ global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache - + __register_features () # Stores suffixes for generated targets. __prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()] - + # Maps suffixes to types __suffixes_to_types = {} - + # A map with all the registered types, indexed by the type name # Each entry is a dictionary with following values: # 'base': the name of base type or None if type has no base @@ -52,12 +52,12 @@ def reset (): # Caches suffixes for targets with certain properties. __target_suffixes_cache = {} - + reset () @bjam_signature((["type"], ["suffixes", "*"], ["base_type", "?"])) def register (type, suffixes = [], base_type = None): - """ Registers a target type, possibly derived from a 'base-type'. + """ Registers a target type, possibly derived from a 'base-type'. If 'suffixes' are provided, they list all the suffixes that mean a file is of 'type'. Also, the first element gives the suffix to be used when constructing and object of 'type'. @@ -70,7 +70,7 @@ def register (type, suffixes = [], base_type = None): # which need to be decomposed. 
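register records which suffixes identify files of a type; later in this module, type() maps a file name back to a type by trying each trailing suffix in turn (for "file.so.1.2" it tries "2", "1", then "so"). A simplified standalone sketch of that suffix bookkeeping, with a made-up registration:

# Simplified standalone sketch of the suffix-to-type bookkeeping; the
# registered type and suffixes here are illustrative only.
import os.path

_suffixes_to_types = {}

def register_suffixes(suffixes, type_name):
    for s in suffixes:
        _suffixes_to_types.setdefault(s, type_name)

def type_of(filename):
    # Try each trailing suffix in turn, e.g. "file.so.1.2" tries "2", "1", "so".
    while True:
        filename, suffix = os.path.splitext(filename)
        if not suffix:
            return None
        if suffix[1:] in _suffixes_to_types:
            return _suffixes_to_types[suffix[1:]]

register_suffixes(['cpp', 'cc'], 'CPP')
assert type_of('hello.cpp') == 'CPP'
assert type_of('README') is None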
if __re_hyphen.search (type): raise BaseException ('type name "%s" contains a hyphen' % type) - + if __types.has_key (type): raise BaseException ('Type "%s" is already registered.' % type) @@ -79,7 +79,7 @@ def register (type, suffixes = [], base_type = None): entry ['derived'] = [] entry ['scanner'] = None __types [type] = entry - + if base_type: __types.setdefault(base_type, {}).setdefault('derived', []).append(type) @@ -87,17 +87,17 @@ def register (type, suffixes = [], base_type = None): # Generated targets of 'type' will use the first of 'suffixes' # (this may be overriden) set_generated_target_suffix (type, [], suffixes [0]) - + # Specify mapping from suffixes to type register_suffixes (suffixes, type) - + feature.extend('target-type', [type]) feature.extend('main-target-type', [type]) feature.extend('base-target-type', [type]) if base_type: - feature.compose ('' + type, replace_grist (base_type, '')) - feature.compose ('' + type, '' + base_type) + feature.compose ('' + type, [replace_grist (base_type, '')]) + feature.compose ('' + type, ['' + base_type]) import b2.build.generators as generators # Adding a new derived type affects generator selection so we need to @@ -111,13 +111,16 @@ def register (type, suffixes = [], base_type = None): # FIXME: quick hack. def type_from_rule_name(rule_name): + assert isinstance(rule_name, basestring) return rule_name.upper().replace("-", "_") def register_suffixes (suffixes, type): - """ Specifies that targets with suffix from 'suffixes' have the type 'type'. + """ Specifies that targets with suffix from 'suffixes' have the type 'type'. If a different type is already specified for any of syffixes, issues an error. """ + assert is_iterable_typed(suffixes, basestring) + assert isinstance(type, basestring) for s in suffixes: if __suffixes_to_types.has_key (s): old_type = __suffixes_to_types [s] @@ -129,40 +132,51 @@ def register_suffixes (suffixes, type): def registered (type): """ Returns true iff type has been registered. """ + assert isinstance(type, basestring) return __types.has_key (type) def validate (type): """ Issues an error if 'type' is unknown. """ + assert isinstance(type, basestring) if not registered (type): raise BaseException ("Unknown target type '%s'" % type) def set_scanner (type, scanner): """ Sets a scanner class that will be used for this 'type'. """ + if __debug__: + from .scanner import Scanner + assert isinstance(type, basestring) + assert issubclass(scanner, Scanner) validate (type) __types [type]['scanner'] = scanner def get_scanner (type, prop_set): """ Returns a scanner instance appropriate to 'type' and 'property_set'. """ + if __debug__: + from .property_set import PropertySet + assert isinstance(type, basestring) + assert isinstance(prop_set, PropertySet) if registered (type): scanner_type = __types [type]['scanner'] if scanner_type: return scanner.get (scanner_type, prop_set.raw ()) pass - + return None def base(type): """Returns a base type for the given type or nothing in case the given type is not derived.""" - + assert isinstance(type, basestring) return __types[type]['base'] def all_bases (type): """ Returns type and all of its bases, in the order of their distance from type. """ + assert isinstance(type, basestring) result = [] while type: result.append (type) @@ -173,6 +187,7 @@ def all_bases (type): def all_derived (type): """ Returns type and all classes that derive from it, in the order of their distance from type. 
""" + assert isinstance(type, basestring) result = [type] for d in __types [type]['derived']: result.extend (all_derived (d)) @@ -182,21 +197,25 @@ def all_derived (type): def is_derived (type, base): """ Returns true if 'type' is 'base' or has 'base' as its direct or indirect base. """ + assert isinstance(type, basestring) + assert isinstance(base, basestring) # TODO: this isn't very efficient, especially for bases close to type if base in all_bases (type): return True - else: + else: return False def is_subtype (type, base): """ Same as is_derived. Should be removed. """ + assert isinstance(type, basestring) + assert isinstance(base, basestring) # TODO: remove this method return is_derived (type, base) @bjam_signature((["type"], ["properties", "*"], ["suffix"])) def set_generated_target_suffix (type, properties, suffix): - """ Sets a target suffix that should be used when generating target + """ Sets a target suffix that should be used when generating target of 'type' with the specified properties. Can be called with empty properties if no suffix for 'type' was specified yet. This does not automatically specify that files 'suffix' have @@ -208,17 +227,27 @@ def set_generated_target_suffix (type, properties, suffix): The 'suffix' parameter can be empty string ("") to indicate that no suffix should be used. """ + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(suffix, basestring) set_generated_target_ps(1, type, properties, suffix) - + def change_generated_target_suffix (type, properties, suffix): - """ Change the suffix previously registered for this type/properties + """ Change the suffix previously registered for this type/properties combination. If suffix is not yet specified, sets it. """ + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(suffix, basestring) change_generated_target_ps(1, type, properties, suffix) def generated_target_suffix(type, properties): + if __debug__: + from .property_set import PropertySet + assert isinstance(type, basestring) + assert isinstance(properties, PropertySet) return generated_target_ps(1, type, properties) # Sets a target prefix that should be used when generating targets of 'type' @@ -236,16 +265,31 @@ def set_generated_target_prefix(type, properties, prefix): # Change the prefix previously registered for this type/properties combination. # If prefix is not yet specified, sets it. 
def change_generated_target_prefix(type, properties, prefix): + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(prefix, basestring) change_generated_target_ps(0, type, properties, prefix) def generated_target_prefix(type, properties): + if __debug__: + from .property_set import PropertySet + assert isinstance(type, basestring) + assert isinstance(properties, PropertySet) return generated_target_ps(0, type, properties) def set_generated_target_ps(is_suffix, type, properties, val): + assert isinstance(is_suffix, (int, bool)) + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(val, basestring) properties.append ('' + type) __prefixes_suffixes[is_suffix].insert (properties, val) def change_generated_target_ps(is_suffix, type, properties, val): + assert isinstance(is_suffix, (int, bool)) + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) + assert isinstance(val, basestring) properties.append ('' + type) prev = __prefixes_suffixes[is_suffix].find_replace(properties, val) if not prev: @@ -256,7 +300,9 @@ def change_generated_target_ps(is_suffix, type, properties, val): # If no prefix/suffix is specified for 'type', returns prefix/suffix for # base type, if any. def generated_target_ps_real(is_suffix, type, properties): - + assert isinstance(is_suffix, (int, bool)) + assert isinstance(type, basestring) + assert is_iterable_typed(properties, basestring) result = '' found = False while type and not found: @@ -278,6 +324,11 @@ def generated_target_ps(is_suffix, type, prop_set): with the specified properties. If not suffix were specified for 'type', returns suffix for base type, if any. """ + if __debug__: + from .property_set import PropertySet + assert isinstance(is_suffix, (int, bool)) + assert isinstance(type, basestring) + assert isinstance(prop_set, PropertySet) key = (is_suffix, type, prop_set) v = __target_suffixes_cache.get(key, None) @@ -289,14 +340,15 @@ def generated_target_ps(is_suffix, type, prop_set): def type(filename): """ Returns file type given it's name. If there are several dots in filename, - tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and + tries each suffix. E.g. for name of "file.so.1.2" suffixes "2", "1", and "so" will be tried. """ + assert isinstance(filename, basestring) while 1: filename, suffix = os.path.splitext (filename) if not suffix: return None suffix = suffix[1:] - + if __suffixes_to_types.has_key(suffix): return __suffixes_to_types[suffix] @@ -306,6 +358,10 @@ def register_type (type, suffixes, base_type = None, os = []): if os is not specified. This rule is injected into each of the type modules for the sake of convenience. 
""" + assert isinstance(type, basestring) + assert is_iterable_typed(suffixes, basestring) + assert isinstance(base_type, basestring) or base_type is None + assert is_iterable_typed(os, basestring) if registered (type): return diff --git a/src/build/version.jam b/src/build/version.jam index fa8fb3a56..8a1a957a1 100644 --- a/src/build/version.jam +++ b/src/build/version.jam @@ -6,13 +6,13 @@ import numbers ; -.major = "2014" ; -.minor = "03" ; +.major = "2015" ; +.minor = "07" ; rule boost-build ( ) { - return "$(.major).$(.minor)-svn" ; + return "$(.major).$(.minor)-git" ; } diff --git a/src/build/virtual_target.py b/src/build/virtual_target.py index ac6703056..ea4b24d82 100644 --- a/src/build/virtual_target.py +++ b/src/build/virtual_target.py @@ -67,7 +67,7 @@ import os.path import string import types -from b2.util import path, utility, set +from b2.util import path, utility, set, is_iterable_typed from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, get_value from b2.util.sequence import unique from b2.tools import common @@ -110,6 +110,7 @@ class VirtualTargetRegistry: and equal action. If such target is found it is retured and 'target' is not registered. Otherwise, 'target' is registered and returned. """ + assert isinstance(target, VirtualTarget) if target.path(): signature = target.path() + "-" + target.name() else: @@ -156,6 +157,11 @@ class VirtualTargetRegistry: for the project, and use that path to determine if the target was already created. TODO: passing project with all virtual targets starts to be annoying. """ + if __debug__: + from .targets import ProjectTarget + assert isinstance(file, basestring) + assert isinstance(file_location, basestring) + assert isinstance(project, ProjectTarget) # Check if we've created a target corresponding to this file. path = os.path.join(os.getcwd(), file_location, file) path = os.path.normpath(path) @@ -192,6 +198,8 @@ class VirtualTargetRegistry: return [t for t in targets if b2.build.type.is_sybtype(t.type(), type)] def register_actual_name (self, actual_name, virtual_target): + assert isinstance(actual_name, basestring) + assert isinstance(virtual_target, VirtualTarget) if self.actual_.has_key (actual_name): cs1 = self.actual_ [actual_name].creating_subvariant () cs2 = virtual_target.creating_subvariant () @@ -238,6 +246,9 @@ class VirtualTargetRegistry: """ Appends the suffix appropriate to 'type/property_set' combination to the specified name and returns the result. """ + assert isinstance(specified_name, basestring) + assert isinstance(file_type, basestring) + assert isinstance(prop_set, property_set.PropertySet) suffix = b2.build.type.generated_target_suffix (file_type, prop_set) if suffix: @@ -254,6 +265,10 @@ class VirtualTarget: project: project to which this target belongs. """ def __init__ (self, name, project): + if __debug__: + from .targets import ProjectTarget + assert isinstance(name, basestring) + assert isinstance(project, ProjectTarget) self.name_ = name self.project_ = project self.dependencies_ = [] @@ -302,6 +317,9 @@ class VirtualTarget: If scanner is not specified, then actual target is returned. """ + if __debug__: + from .scanner import Scanner + assert scanner is None or isinstance(scanner, Scanner) actual_name = self.actualize_no_scanner () if self.always_: @@ -373,6 +391,9 @@ class AbstractFileTarget (VirtualTarget): type: optional type of this target. 
""" def __init__ (self, name, type, project, action = None, exact=False): + assert isinstance(type, basestring) or type is None + assert action is None or isinstance(action, Action) + assert isinstance(exact, (int, bool)) VirtualTarget.__init__ (self, name, project) self.type_ = type @@ -402,6 +423,7 @@ class AbstractFileTarget (VirtualTarget): """ Sets the path. When generating target name, it will override any path computation from properties. """ + assert isinstance(path, basestring) self.path_ = os.path.normpath(path) def action (self): @@ -413,6 +435,7 @@ class AbstractFileTarget (VirtualTarget): """ Sets/gets the 'root' flag. Target is root is it directly correspods to some variant of a main target. """ + assert isinstance(set, (int, bool, type(None))) if set: self.root_ = True return self.root_ @@ -425,6 +448,7 @@ class AbstractFileTarget (VirtualTarget): s: If specified, specified the value to set, which should be instance of 'subvariant' class. """ + assert s is None or isinstance(s, Subvariant) if s and not self.creating_subvariant (): if self.creating_subvariant (): raise BaseException ("Attempt to change 'dg'") @@ -435,6 +459,7 @@ class AbstractFileTarget (VirtualTarget): return self.creating_subvariant_ def actualize_action (self, target): + assert isinstance(target, basestring) if self.action_: self.action_.actualize () @@ -513,7 +538,7 @@ class AbstractFileTarget (VirtualTarget): If not property is specified, or the rule specified by returns nothing, returns the result of calling virtual-target.add-suffix""" - + assert isinstance(specified_name, basestring) if self.action_: ps = self.action_.properties() else: @@ -627,6 +652,9 @@ class FileTarget (AbstractFileTarget): - the suffix which correspond to the target's type. """ def __init__ (self, name, type, project, action = None, path=None, exact=False): + assert isinstance(type, basestring) or type is None + assert action is None or isinstance(action, Action) + assert isinstance(exact, (int, bool)) AbstractFileTarget.__init__ (self, name, type, project, action, exact) self.path_ = path @@ -638,10 +666,12 @@ class FileTarget (AbstractFileTarget): return self.name_ def clone_with_different_type(self, new_type): + assert isinstance(new_type, basestring) return FileTarget(self.name_, new_type, self.project_, self.action_, self.path_, exact=True) def actualize_location (self, target): + assert isinstance(target, basestring) engine = self.project_.manager_.engine () if self.action_: @@ -714,6 +744,7 @@ class FileTarget (AbstractFileTarget): class NotFileTarget(AbstractFileTarget): def __init__(self, name, project, action): + assert isinstance(action, Action) AbstractFileTarget.__init__(self, name, None, project, action) def path(self): @@ -721,6 +752,7 @@ class NotFileTarget(AbstractFileTarget): return None def actualize_location(self, target): + assert isinstance(target, basestring) bjam.call("NOTFILE", target) bjam.call("ALWAYS", target) bjam.call("NOUPDATE", target) @@ -735,8 +767,9 @@ class Action: not establish dependency relationship, but should do everything else. 
""" def __init__ (self, manager, sources, action_name, prop_set): + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(action_name, basestring) or action_name is None assert(isinstance(prop_set, property_set.PropertySet)) - assert type(sources) == types.ListType self.sources_ = sources self.action_name_ = action_name if not prop_set: @@ -758,11 +791,14 @@ class Action: def add_targets (self, targets): + assert is_iterable_typed(targets, VirtualTarget) self.targets_ += targets - def replace_targets (old_targets, new_targets): - self.targets_ = [t for t in targets if not t in old_targets] + new_targets + def replace_targets(self, old_targets, new_targets): + assert is_iterable_typed(old_targets, VirtualTarget) + assert is_iterable_typed(new_targets, VirtualTarget) + self.targets_ = [t for t in self.targets_ if not t in old_targets] + new_targets def targets (self): return self.targets_ @@ -826,6 +862,8 @@ class Action: For each passed source, actualizes it with the appropriate scanner. Returns the actualized virtual targets. """ + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) result = [] for i in sources: scanner = None @@ -852,6 +890,8 @@ class Action: New values will be *appended* to the variables. They may be non-empty, if caller wants it. """ + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) dependencies = self.properties_.get ('') self.dependency_only_sources_ += self.actualize_source_type (dependencies, prop_set) @@ -879,6 +919,7 @@ class Action: to get generated headers correctly. Default implementation returns its argument. """ + assert isinstance(prop_set, property_set.PropertySet) return prop_set @@ -889,6 +930,7 @@ class NullAction (Action): actions which create them. """ def __init__ (self, manager, prop_set): + assert isinstance(prop_set, property_set.PropertySet) Action.__init__ (self, manager, [], None, prop_set) def actualize (self): @@ -908,7 +950,8 @@ class NonScanningAction(Action): Action.__init__(self, b2.manager.get_manager(), sources, action_name, property_set) def actualize_source_type(self, sources, property_set): - + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(property_set, property_set.PropertySet) result = [] for s in sources: result.append(s.actualize()) @@ -920,6 +963,9 @@ def traverse (target, include_roots = False, include_sources = False): found during traversal, it's either included or not, dependencing of the value of 'include_roots'. In either case, sources of root are not traversed. """ + assert isinstance(target, VirtualTarget) + assert isinstance(include_roots, (int, bool)) + assert isinstance(include_sources, (int, bool)) result = [] if target.action (): @@ -951,7 +997,12 @@ def clone_action (action, new_project, new_action_name, new_properties): and all produced target. The rule-name and properties are set to 'new-rule-name' and 'new-properties', if those are specified. 
Returns the cloned action.""" - + if __debug__: + from .targets import ProjectTarget + assert isinstance(action, Action) + assert isinstance(new_project, ProjectTarget) + assert isinstance(new_action_name, basestring) + assert isinstance(new_properties, property_set.PropertySet) if not new_action_name: new_action_name = action.action_name() @@ -990,6 +1041,14 @@ class Subvariant: sources_usage_requirements: Properties propagated from sources created_targets: Top-level created targets """ + if __debug__: + from .targets import AbstractTarget + assert isinstance(main_target, AbstractTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, VirtualTarget) + assert isinstance(build_properties, property_set.PropertySet) + assert isinstance(sources_usage_requirements, property_set.PropertySet) + assert is_iterable_typed(created_targets, VirtualTarget) self.main_target_ = main_target self.properties_ = prop_set self.sources_ = sources @@ -1028,6 +1087,7 @@ class Subvariant: return self.sources_usage_requirements_ def set_usage_requirements (self, usage_requirements): + assert isinstance(usage_requirements, property_set.PropertySet) self.usage_requirements_ = usage_requirements def usage_requirements (self): @@ -1038,7 +1098,9 @@ class Subvariant: either directly or indirectly, and either as sources, or as dependency properties. Targets referred with dependency property are returned a properties, not targets.""" - + if __debug__: + from .targets import GenerateResult + assert isinstance(result, GenerateResult) # Find directly referenced targets. deps = self.build_properties().dependency() all_targets = self.sources_ + deps @@ -1071,7 +1133,8 @@ class Subvariant: if 'target_type' is not specified), the result will contain <$(feature)>path-to-that-target. """ - + assert isinstance(feature, basestring) + assert isinstance(target_type, basestring) if not target_type: key = feature else: @@ -1088,6 +1151,7 @@ class Subvariant: return result def all_target_directories(self, target_type = None): + assert isinstance(target_type, (basestring, type(None))) # TODO: does not appear to use target_type in deciding # if we've computed this already. if not self.target_directories_: @@ -1095,6 +1159,7 @@ class Subvariant: return self.target_directories_ def compute_target_directories(self, target_type=None): + assert isinstance(target_type, (basestring, type(None))) result = [] for t in self.created_targets(): if not target_type or b2.build.type.is_derived(t.type(), target_type): diff --git a/src/build_system.py b/src/build_system.py index 6bd05d1d9..b5a3b2775 100644 --- a/src/build_system.py +++ b/src/build_system.py @@ -509,15 +509,6 @@ def main_real(): # that all project files already be loaded. (target_ids, properties) = build_request.from_command_line(sys.argv[1:] + extra_properties) - # Expand properties specified on the command line into multiple property - # sets consisting of all legal property combinations. Each expanded property - # set will be used for a single build run. E.g. if multiple toolsets are - # specified then requested targets will be built with each of them. - if properties: - expanded = build_request.expand_no_defaults(properties) - else: - expanded = [property_set.empty()] - # Check that we actually found something to build. 
if not current_project and not target_ids: get_manager().errors()("no Jamfile in current directory found, and no target references specified.") @@ -595,6 +586,22 @@ def main_real(): global results_of_main_targets + # Expand properties specified on the command line into multiple property + # sets consisting of all legal property combinations. Each expanded property + # set will be used for a single build run. E.g. if multiple toolsets are + # specified then requested targets will be built with each of them. + # The expansion is being performed as late as possible so that the feature + # validation is performed after all necessary modules (including project targets + # on the command line) have been loaded. + if properties: + expanded = [] + for p in properties: + expanded.extend(build_request.convert_command_line_element(p)) + + expanded = build_request.expand_no_defaults(expanded) + else: + expanded = [property_set.empty()] + # Now that we have a set of targets to build and a set of property sets to # build the targets with, we can start the main build process by using each # property set to generate virtual targets from all of our listed targets diff --git a/src/contrib/boost.jam b/src/contrib/boost.jam index 7daefd0c7..c3caa3a3b 100644 --- a/src/contrib/boost.jam +++ b/src/contrib/boost.jam @@ -207,33 +207,37 @@ rule boost_std ( inc ? lib ? ) alias headers ; boost_lib_std chrono : BOOST_CHRONO_DYN_LINK ; + boost_lib_std container : BOOST_CONTAINER_DYN_LINK ; boost_lib_std date_time : BOOST_DATE_TIME_DYN_LINK ; boost_lib_std filesystem : BOOST_FILE_SYSTEM_DYN_LINK ; boost_lib_std graph : BOOST_GRAPH_DYN_LINK ; boost_lib_std graph_parallel : BOOST_GRAPH_DYN_LINK ; boost_lib_std iostreams : BOOST_IOSTREAMS_DYN_LINK ; boost_lib_std locale : BOOST_LOCALE_DYN_LINK ; - boost_lib_std math_tr1 : BOOST_MATH_TR1_DYN_LINK ; - boost_lib_std math_tr1f : BOOST_MATH_TR1_DYN_LINK ; - boost_lib_std math_tr1l : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std log : BOOST_LOG_DYN_LINK ; + boost_lib_std log_setup : BOOST_LOG_SETUP_DYN_LINK ; boost_lib_std math_c99 : BOOST_MATH_TR1_DYN_LINK ; boost_lib_std math_c99f : BOOST_MATH_TR1_DYN_LINK ; boost_lib_std math_c99l : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std math_tr1 : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std math_tr1f : BOOST_MATH_TR1_DYN_LINK ; + boost_lib_std math_tr1l : BOOST_MATH_TR1_DYN_LINK ; boost_lib_std mpi : BOOST_MPI_DYN_LINK ; + boost_lib_std prg_exec_monitor : BOOST_TEST_DYN_LINK ; boost_lib_std program_options : BOOST_PROGRAM_OPTIONS_DYN_LINK ; boost_lib_std python : BOOST_PYTHON_DYN_LINK ; boost_lib_std python3 : BOOST_PYTHON_DYN_LINK ; boost_lib_std random : BOOST_RANDOM_DYN_LINK ; boost_lib_std regex : BOOST_REGEX_DYN_LINK ; boost_lib_std serialization : BOOST_SERIALIZATION_DYN_LINK ; - boost_lib_std wserialization : BOOST_SERIALIZATION_DYN_LINK ; boost_lib_std signals : BOOST_SIGNALS_DYN_LINK ; boost_lib_std system : BOOST_SYSTEM_DYN_LINK ; - boost_lib_std unit_test_framework : BOOST_TEST_DYN_LINK ; - boost_lib_std prg_exec_monitor : BOOST_TEST_DYN_LINK ; boost_lib_std test_exec_monitor : BOOST_TEST_DYN_LINK ; boost_lib_std thread : BOOST_THREAD_DYN_DLL ; + boost_lib_std timer : BOOST_TIMER_DYN_DLL ; + boost_lib_std unit_test_framework : BOOST_TEST_DYN_LINK ; boost_lib_std wave : BOOST_WAVE_DYN_LINK ; + boost_lib_std wserialization : BOOST_SERIALIZATION_DYN_LINK ; } # Example placeholder for rules defining Boost library project & library targets diff --git a/src/contrib/boost.py b/src/contrib/boost.py index e256fe965..7d1f6b4d9 100644 --- 
a/src/contrib/boost.py +++ b/src/contrib/boost.py @@ -4,7 +4,7 @@ # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # Boost library support module. -# +# # This module allows to use the boost library from boost-build projects. # The location of a boost source tree or the path to a pre-built # version of the library can be configured from either site-config.jam @@ -13,15 +13,15 @@ # tree. As a last resort it tries to use pre-built libraries from the standard # search path of the compiler. # -# If the location to a source tree is known, the module can be configured +# If the location to a source tree is known, the module can be configured # from the *-config.jam files: # # using boost : 1.35 : /path-to-boost-root ; # # If the location to a pre-built version is known: # -# using boost : 1.34 -# : /usr/local/include/boost_1_34 +# using boost : 1.34 +# : /usr/local/include/boost_1_34 # /usr/local/lib # ; # @@ -41,7 +41,7 @@ # # boost.use-project ; # -# The library can be referenced with the project identifier '/boost'. To +# The library can be referenced with the project identifier '/boost'. To # reference the program_options you would specify: # # exe myexe : mysrc.cpp : /boost//program_options ; @@ -76,7 +76,7 @@ __debug = None def debug(): global __debug if __debug is None: - __debug = "--debug-configuration" in bjam.variable("ARGV") + __debug = "--debug-configuration" in bjam.variable("ARGV") return __debug @@ -94,9 +94,9 @@ def debug(): # /path-to-include: The include directory to search. # # /path-to-library: The library directory to search. -# +# # system or versioned. -# +# # my_build_id: The custom build id to use. # def init(version, options = None): @@ -130,7 +130,7 @@ rules = projects.project_rules() # of the boost library. If the 'version' parameter is omitted either # the configured default (first in config files) is used or an auto # configuration will be attempted. -# +# @bjam_signature(([ "version", "?" 
], )) def use_project(version = None): projects.push_current( projects.current() ) @@ -149,7 +149,7 @@ def use_project(version = None): root = opts.get('' ) inc = opts.get('') lib = opts.get('') - + if debug(): print "notice: using boost library {} {}".format( version, opt.raw() ) @@ -171,7 +171,7 @@ def use_project(version = None): root = bjam.variable("BOOST_ROOT") module = projects.current().project_module() - + if root: bjam.call('call-in-module', module, 'use-project', ['boost', root]) else: @@ -199,14 +199,15 @@ def boost_std(inc = None, lib = None): tag_prop_set = property_set.create([property.Property('', tag_std)]) attributes = projects.attributes(projects.current().project_module()) attributes.requirements = attributes.requirements.refine(tag_prop_set) - + alias('headers') - + def boost_lib(lib_name, dyn_link_macro): if (isinstance(lib_name,str)): lib_name = [lib_name] builtin.lib(lib_name, usage_requirements=['shared:{}'.format(dyn_link_macro)]) - + + boost_lib('container' , 'BOOST_CONTAINER_DYN_LINK' ) boost_lib('date_time' , 'BOOST_DATE_TIME_DYN_LINK' ) boost_lib('filesystem' , 'BOOST_FILE_SYSTEM_DYN_LINK' ) boost_lib('graph' , 'BOOST_GRAPH_DYN_LINK' ) @@ -267,7 +268,7 @@ def tag_std(name, type, prop_set): def tag_maybe(param): return ['-{}'.format(param)] if param else [] - + def tag_system(name, type, prop_set): return common.format_name([''] + tag_maybe(__build_id), name, type, prop_set) diff --git a/src/contrib/modular.jam b/src/contrib/modular.jam index 917dfbaa5..cba517048 100644 --- a/src/contrib/modular.jam +++ b/src/contrib/modular.jam @@ -3,108 +3,11 @@ # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) -#alias library -# : -# : : : include -# ; - import path ; import project ; import modules ; import regex ; - -rule find ( target-refs + ) -{ - process-args ; - - local caller-mod = [ CALLER_MODULE ] ; - local caller-dir = [ modules.peek $(caller-mod) : __file__ ] ; - caller-dir = $(caller-dir:D) ; - caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ; - - for local target-ref in $(target-refs) - { - local ref = [ MATCH ^(.*)//.* : $(target-ref:G=) ] ; - local search-prefix ; - local search-sub ; - for local prefix in $(.search-path-prefix) - { - if ! $(search-prefix) - { - local search-match = [ MATCH ^($(prefix))/(.*)$ : $(ref) ] ; - search-prefix = $(search-match[1]) ; - search-sub = $(search-match[2]) ; - } - } - local found = [ path.glob $(.search-path.$(search-prefix)) : $(search-sub) ] ; - found = $(found[1]) ; - if $(found) - { - local lib-ref = [ regex.split $(search-sub) / ] ; - lib-ref = $(search-prefix)/$(lib-ref[1]) ; - local lib-path = [ path.relative-to $(caller-dir) $(found) ] ; - library $(lib-ref) $(caller-mod) : $(lib-path) ; - } - } - - return $(target-refs) ; -} - -rule library ( name caller-module ? : root ) -{ - process-args ; - - # Dir path of caller to base paths from. - caller-module ?= [ CALLER_MODULE ] ; - local caller-dir = [ modules.peek $(caller-module) : __file__ ] ; - caller-dir = $(caller-dir:D) ; - - # Find the various parts of the library. - local lib-dir = [ path.root [ path.root $(root) $(caller-dir) ] [ path.pwd ] ] ; - local lib-contents = [ path.glob $(lib-dir) : "include" "build" ] ; - lib-contents = $(lib-contents:D=) ; - # "include" dir for library.. - local include-dir ; - if "include" in $(lib-contents) - { - include-dir = include ; - } - - # Does it look like a library? - if $(include-dir) - { - # Load/create/declare library project. 
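For orientation while reading this modular.jam rewrite: a reference such as boost/regex//boost_regex is resolved by stripping the //target part, matching what remains against the prefixes registered via add-location, globbing the rest of the path under that prefix's directories, and declaring a library project at the first hit. That is the job of the replacement resolve-reference and define-library rules further down in this file's diff. The Python below only sketches that lookup flow under those assumptions; the function and variable names are invented for illustration and are not part of the build system:

    import glob, os

    # Illustrative only: prefix match plus glob lookup, mirroring the flow of
    # modular.jam's resolve-reference. search_paths maps a registered prefix
    # to the directories added for it with add-location.
    def resolve_reference(target_ref, search_paths):
        ref = target_ref.split('//', 1)[0]
        for prefix, dirs in search_paths.items():
            if not ref.startswith(prefix + '/'):
                continue
            sub = ref[len(prefix) + 1:]
            for d in dirs:
                hits = glob.glob(os.path.join(d, sub))
                if hits:
                    lib_ref = prefix + '/' + sub.split('/', 1)[0]
                    return lib_ref, hits[0]   # project id and on-disk root
        return None

For example, resolve_reference('boost/regex//boost_regex', {'boost': ['libs']}) reports ('boost/regex', 'libs/regex') when a libs/regex directory exists, which corresponds to the point where the Jam code declares the library project and registers the use-project alias.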
- local lib-module = [ project.find $(root) : $(caller-dir) ] ; - if ! $(lib-module) - { - lib-module = [ project.load - [ path.root [ path.make $(root) ] $(caller-dir) ] : synthesize ] ; - } - local lib-target = [ project.target $(lib-module) ] ; - - # We move to the library project module to define the various - # targets others use for the library. - if ! [ modules.peek $(lib-module) : __library__ ] - { - modules.poke $(lib-module) : __library__ : $(name) ; - project.push-current $(lib-target) ; - - # Declare the library alias. - modules.call-in $(lib-module) : alias library - : # Sources - : # Requirements - : # Default Build - : # Usage Requirements - $(include-dir) - ; - - project.pop-current ; - } - - # Declare project alternate ID. - modules.call-in $(caller-module) : use-project $(name) : $(root) ; - } -} +import type ; # Add a location, i.e. directory, where to search for libraries. # The optional 'prefix' indicates which rooted-prefixes the new @@ -126,6 +29,167 @@ rule add-location ( dir prefix ? : base-dir ? ) .search-path.$(prefix) += [ path.root [ path.root $(dir) $(base-dir) ] [ path.pwd ] ] ; } +# Declares additional definitions of a modular library target external +# to the modular library build itself. This makes it possible to externally +# define modular libraries without modifying the library. The passed in +# values are added on demand when the named library is first declared. +rule external ( + name : sources * : requirements * : default-build * : + usage-requirements * ) +{ + .external.($(name)).sources = $(sources) ; + .external.($(name)).requirements = $(requirements) ; + .external.($(name)).default-build = $(default-build) ; + .external.($(name)).usage-requirements = $(usage-requirements) ; +} + +# Find, and declare, any modular libraries referenced in the target-refs. +# This will both load the modular libraries, and declare/manufacture +# the modular libraries as needed. +rule find ( target-refs + ) +{ + process-args ; + + local caller-mod = [ CALLER_MODULE ] ; + local caller-dir = [ modules.peek $(caller-mod) : __file__ ] ; + caller-dir = $(caller-dir:D) ; + caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ; + + local result-refs ; + for local target-ref in $(target-refs) + { + result-refs += [ resolve-reference $(target-ref) + : $(caller-mod) $(caller-dir) ] ; + } + + return $(result-refs) ; +} + +############################################################################## + +local rule resolve-reference ( target-ref : caller-mod caller-dir ? ) +{ + # ECHO %%% modular.resolve-target-ref $(target-ref) :: $(caller-mod) $(caller-dir) ; + if ! $(caller-dir) + { + caller-dir = [ modules.peek $(caller-mod) : __file__ ] ; + caller-dir = $(caller-dir:D) ; + caller-dir = [ path.root $(caller-dir) [ path.pwd ] ] ; + } + local result-ref = $(target-ref) ; + local ref = [ MATCH ^(.*)//.* : $(target-ref:G=) ] ; + # if ! ( $(ref) in $(.target-refs) ) + { + # .target-refs += $(ref) ; + local search-prefix ; + local search-sub ; + for local prefix in $(.search-path-prefix) + { + if ! 
$(search-prefix) + { + local search-match = [ MATCH ^($(prefix))/(.*)$ : $(ref) ] ; + search-prefix = $(search-match[1]) ; + search-sub = $(search-match[2]) ; + } + } + + if $(search-prefix) + { + local found = [ path.glob $(.search-path.$(search-prefix)) : $(search-sub) ] ; + found = $(found[1]) ; + if $(found) + { + local lib-ref = [ regex.split $(search-sub) / ] ; + lib-ref = $(search-prefix)/$(lib-ref[1]) ; + local lib-path = [ path.relative-to $(caller-dir) $(found) ] ; + define-library $(lib-ref) $(caller-mod) : $(lib-path) ; + } + } + } + return $(result-ref) ; +} + +local rule define-library ( name caller-module ? : root ) +{ + # ECHO ~~~ modular.library $(name) $(caller-module) :: $(root) :: $(depends) ; + + process-args ; + + # Dir path of caller to base paths from. + caller-module ?= [ CALLER_MODULE ] ; + local caller-dir = [ modules.peek $(caller-module) : __file__ ] ; + caller-dir = $(caller-dir:D) ; + + # Find the various parts of the library. + local lib-dir = [ path.root [ path.root $(root) $(caller-dir) ] [ path.pwd ] ] ; + local lib-contents = [ path.glob $(lib-dir) : "include" "build" ] ; + lib-contents = $(lib-contents:D=) ; + + # "include" dir for library.. + local include-dir ; + if "include" in $(lib-contents) + { + include-dir = $(root)/include ; + } + + # If it has a build dir, i.e. it has targets to build, + # we root the project at the build dir to make it easy + # to refer to the build targets. This mirrors the regular + # Boost organization of the project aliases. + if "build" in $(lib-contents) + { + root = $(root)/build ; + build-dir = "." ; + } + + # Shadow target declarations so that we can alter build targets + # to work in the standalone modular structure. + local lib-location = [ path.root [ path.make $(root) ] $(caller-dir) ] ; + local lib-module-name = [ project.module-name $(lib-location) ] ; + local modular-rules = [ RULENAMES modular-rules ] ; + IMPORT modular-rules : $(modular-rules) : $(lib-module-name) : $(modular-rules) ; + + # Load/create/declare library project. + local lib-module = [ project.find $(root) : $(caller-dir) ] ; + if ! $(lib-module) + { + # If the find was unable to load the project we synthesize it. + lib-module = [ project.load $(lib-location) : synthesize ] ; + } + local lib-target = [ project.target $(lib-module) ] ; + if ! [ modules.peek $(lib-module) : __library__ ] + { + modules.poke $(lib-module) : __library__ : $(name) ; + for local type in [ modules.peek type : .types ] + { + main-rule-name = [ type.type-to-rule-name $(type) ] ; + IMPORT modular-rules : main-target-rule : $(lib-module-name) : $(main-rule-name) ; + } + } + + # Declare project alternate ID. + modules.call-in $(caller-module) : use-project $(name) : $(root) ; + + # Create a "library" target that has basic usage info if needed. + if ! [ $(lib-target).has-alternative-for-target library ] + { + include-dir = [ path.relative-to $(root) $(include-dir) ] ; + + project.push-current $(lib-target) ; + + # Declare the library alias. + modules.call-in $(lib-module) : library + : # Sources + : # Requirements + : # Default Build + : # Usage Requirements + $(include-dir) + ; + + project.pop-current ; + } +} + local rule process-args ( ) { if ! 
$(.did-process-args) @@ -139,3 +203,86 @@ local rule process-args ( ) } } } + +rule apply-external ( + mod : field : values * ) +{ + local result ; + local name = [ modules.peek $(mod) : __library__ ] ; + values += $(.external.($(name)).$(field)) ; + for local value in $(values) + { + result += [ resolve-reference $(value) : $(mod) ] ; + } + return $(result) ; +} + +module modular-rules +{ + import type ; + import targets ; + import builtin ; + import alias ; + + # Avoids any form of installation for Boost modules. + rule boost-install ( libraries * ) { } + + # Generic typed target rule to pre-process main target + # declarations to make them work within the standalone + # modular structure. + rule main-target-rule ( + name : sources * : requirements * : default-build * : + usage-requirements * ) + { + local mod = [ CALLER_MODULE ] ; + + # ECHO @@@ [[$(mod)]] modular-rules.main-target-rule $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ; + + # First discover the required target type based on the exact alias used to + # invoke this rule. + local bt = [ BACKTRACE 1 ] ; + local rulename = $(bt[4]) ; + local target-type = [ type.type-from-rule-name $(rulename) ] ; + return [ targets.create-typed-target $(target-type) : [ project.current ] : + $(name) : $(sources) : $(requirements) : $(default-build) : + $(usage-requirements) ] ; + } + + rule lib ( names + : sources * : requirements * : default-build * : + usage-requirements * ) + { + local mod = [ CALLER_MODULE ] ; + requirements += library ; + usage-requirements += library ; + + # ECHO @@@ [[$(mod)]] modular-rules.lib $(names) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ; + return [ builtin.lib $(names) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ; + } + + rule alias ( name : sources * : requirements * : default-build * : + usage-requirements * ) + { + local mod = [ CALLER_MODULE ] ; + + # ECHO @@@ [[$(mod)]] modular-rules.alias $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ; + return [ alias.alias $(name) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ; + } + + rule library ( name ? : sources * : requirements * : default-build * : + usage-requirements * ) + { + import modular ; + + local mod = [ CALLER_MODULE ] ; + sources = [ modular.apply-external $(mod) : sources : $(sources) ] ; + requirements = [ modular.apply-external $(mod) : requirements : $(requirements) ] ; + default-build = [ modular.apply-external $(mod) : default-build : $(default-build) ] ; + usage-requirements = [ modular.apply-external $(mod) : usage-requirements : $(usage-requirements) ] ; + + name ?= library ; + + # ECHO @@@ [[$(mod)]] modular-rules.library $(name) :: $(sources) :: $(requirements) :: $(default-build) :: $(usage-requirements) ; + return [ alias.alias $(name) : $(sources) : $(requirements) : $(default-build) : $(usage-requirements) ] ; + } +} + diff --git a/src/engine/Jambase b/src/engine/Jambase index 6e7b7a2be..a736e2676 100644 --- a/src/engine/Jambase +++ b/src/engine/Jambase @@ -9,6 +9,7 @@ # This file is ALSO: # Copyright 2001-2004 David Abrahams. # Copyright 2002-2004 Rene Rivera. +# Copyright 2015 Artur Shepilko. # Distributed under the Boost Software License, Version 1.0. 
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) @@ -223,6 +224,7 @@ else # 11/21/96 (peterk) - Support for BeOS # 07/19/99 (sickel) - Support for Mac OS X Server (and maybe client) # 02/18/00 (belmonte)- Support for Cygwin. +# 02/19/15 (shepilko)- On VMS use POSIX path in rules, translate to VMS via $(:W). # Special targets defined in this file: # @@ -743,6 +745,9 @@ else if $(OS2) } else if $(VMS) { + ## DECC CRTL supports POSIX-style path, JAM is retrofitted to call it. + ## Use POSIX-style path for general handling in JAM rule-scope. + ## Translate the POSIX path to VMS in JAM actions-scope via $(:W) modifier. C++ ?= cxx ; C++FLAGS ?= ; CC ?= cc ; @@ -750,8 +755,8 @@ else if $(VMS) CHMOD ?= set file/prot= ; CP ?= copy/replace ; CRELIB ?= true ; - DOT ?= [] ; - DOTDOT ?= [-] ; + DOT ?= . ; ## Use POSIX CRTL + DOTDOT ?= .. ; ## Use POSIX CRTL EXEMODE ?= (w:e) ; FILEMODE ?= (w:r) ; HDRS ?= ; @@ -759,12 +764,12 @@ else if $(VMS) LINKFLAGS ?= "" ; LINKLIBS ?= ; MKDIR ?= create/dir ; - MV ?= rename ; + MV ?= rename /noconf ; OPTIM ?= "" ; - RM ?= delete ; + RM ?= delete /noconf ; RUNVMS ?= mcr ; SHELLMODE ?= (w:er) ; - SLASH ?= . ; + SLASH ?= / ; ## Use POSIX CRTL STDHDRS ?= decc$library_include ; SUFEXE ?= .exe ; SUFLIB ?= .olb ; @@ -1087,7 +1092,7 @@ rule Cc if $(VMS) && $(_h) { - SLASHINC on $(<) = "/inc=(" $(_h[1]) ,$(_h[2-]) ")" ; + SLASHINC on $(<) = "/inc=(" \"$(_h[1])\" ,\"$(_h[2-])\" ")" ; } else if $(MAC) && $(_h) { @@ -1117,7 +1122,7 @@ rule C++ if $(VMS) && $(_h) { - SLASHINC on $(<) = "/inc=(" $(_h[1]) ,$(_h[2-]) ")" ; + SLASHINC on $(<) = "/inc=(" \"$(_h[1])\" ,\"$(_h[2-])\" ")" ; } else if $(MAC) && $(_h) { @@ -1865,27 +1870,28 @@ rule FDirName { _s = $(DOT) ; } - else if $(VMS) - { - # This handles the following cases: - # a -> [.a] - # a b c -> [.a.b.c] - # x: -> x: - # x: a -> x:[a] - # x:[a] b -> x:[a.b] - - switch $(<[1]) - { - case *:* : _s = $(<[1]) ; - case \\[*\\] : _s = $(<[1]) ; - case * : _s = [.$(<[1])] ; - } - - for _i in [.$(<[2-])] - { - _s = $(_i:R=$(_s)) ; - } - } + ## On VMS use general POSIX path handling, JAM resolves it through VMS CRTL. + #else if $(VMS) + #{ + # # This handles the following cases: + # # a -> [.a] + # # a b c -> [.a.b.c] + # # x: -> x: + # # x: a -> x:[a] + # # x:[a] b -> x:[a.b] + # + # switch $(<[1]) + # { + # case *:* : _s = $(<[1]) ; + # case \\[*\\] : _s = $(<[1]) ; + # case * : _s = [.$(<[1])] ; + # } + # + # for _i in [.$(<[2-])] + # { + # _s = $(_i:R=$(_s)) ; + # } + #} else if $(MAC) { _s = $(DOT) ; @@ -2371,47 +2377,63 @@ else if $(VMS) { actions updated together piecemeal Archive { - lib/replace $(<) $(>[1]) ,$(>[2-]) + lib/replace $(<:W) $(>[1]:W) ,$(>[2-]:W) } actions Cc { - $(CC)/obj=$(<) $(CCFLAGS) $(OPTIM) $(SLASHINC) $(>) + $(CC)/obj=$(<:W) $(CCFLAGS) $(OPTIM) $(SLASHINC) $(>:W) } actions C++ { - $(C++)/obj=$(<) $(C++FLAGS) $(OPTIM) $(SLASHINC) $(>) + $(C++)/obj=$(<:W) $(C++FLAGS) $(OPTIM) $(SLASHINC) $(>:W) } actions piecemeal together existing Clean { - $(RM) $(>[1]);* ,$(>[2-]);* + $(RM) $(>[1]:W);* ,$(>[2-]:W);* } actions together quietly CreLib { - if f$search("$(<)") .eqs. "" then lib/create $(<) + if f$search("$(<:W)") .eqs. 
"" then lib/create $(<:W) } actions GenFile1 { - mcr $(>[1]) $(<) $(>[2-]) + mcr $(>[1]:W) $(<:W) $(>[2-]:W) + } + + actions MkDir1 + { + $(MKDIR) $(<:W) + } + + actions Yacc1 + { + $(YACC) $(YACCFLAGS) $(>:W) + } + + actions YaccMv + { + $(MV) $(YACCFILES).c $(<[1]:W) + $(MV) $(YACCFILES).h $(<[2]:W) } actions Link bind NEEDLIBS { - $(LINK)/exe=$(<) $(LINKFLAGS) $(>[1]) ,$(>[2-]) ,$(NEEDLIBS)/lib ,$(LINKLIBS) + $(LINK)/exe=$(<:W) $(LINKFLAGS) $(>[1]:W) ,$(>[2-]:W) ,$(NEEDLIBS:W)/lib ,$(LINKLIBS:W) } actions quietly updated piecemeal together RmTemps { - $(RM) $(>[1]);* ,$(>[2-]);* + $(RM) $(>[1]:W);* ,$(>[2-]:W);* } actions Shell { - $(CP) $(>) $(<) + $(CP) $(>:W) $(<:W) } } diff --git a/src/engine/build.bat b/src/engine/build.bat index 0fdb804b6..e0e742da2 100644 --- a/src/engine/build.bat +++ b/src/engine/build.bat @@ -365,7 +365,7 @@ if NOT "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" ( if "_%VCINSTALLDIR%_" == "__" ( set "PATH=%BOOST_JAM_TOOLSET_ROOT%bin;%PATH%" ) ) -set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib" +set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DWINVER=0x0501 -D_WIN32_WINNT=0x0501 -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib" set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0" set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0" set "BOOST_JAM_OPT_YYACC=/Febootstrap\yyacc0" diff --git a/src/engine/build.jam b/src/engine/build.jam index 0b6d2f252..a0f1ea30f 100644 --- a/src/engine/build.jam +++ b/src/engine/build.jam @@ -21,7 +21,8 @@ for local v in ARGV CC CFLAGS LIBS if $(OS) = NT { rule .path { return "$(<:J=\\)" ; } ./ = "/" ; } else { rule .path { return "$(<:J=/)" ; } } -. = "." ; +if $(OS) = VMS { . = "_" ; } +else { . = "." ; } ./ ?= "" ; # Info about what we are building. @@ -105,6 +106,12 @@ if $(with-python) } } +if $(--python-include) || $(--python-lib) +{ + ECHO "Python includes: $(--python-include:J=)" ; + ECHO "Python includes: $(--python-lib:J=)" ; +} + # Boehm GC? 
if --gc in $(ARGV) { @@ -273,13 +280,6 @@ toolset kylix bc++ : -o : -D [ opt --debug : -runtime ssd ] : $(--python-lib[1]) ; } -## MINGW GCC -toolset mingw gcc : "-o " : -D - : - [ opt --release : -s -O3 -finline-functions ] - [ opt --debug : -g -O0 -fno-inline -pg ] - -I$(--python-include) -I$(--extra-include) - : $(--python-lib[2]) ; ## MIPS Pro toolset mipspro cc : "-o " : -D : @@ -336,13 +336,22 @@ toolset tru64cxx cc : "-o " : -D [ opt --debug : -g -O0 -pg ] -I$(--python-include) -I$(--extra-include) : -L$(--python-lib[1]) -l$(--python-lib[2]) ; -## IBM VisualAge C++ +## IBM VisualAge C++ or IBM XL C/C++ for Aix or IBM XL C/C++ for Linux (Big endian) toolset vacpp xlc : "-o " : -D : [ opt --release : -s -O3 -qstrict -qinline ] [ opt --debug : -g -qNOOPTimize -qnoinline -pg ] -I$(--python-include) -I$(--extra-include) : -L$(--python-lib[1]) -l$(--python-lib[2]) [ if-os AIX : -bmaxdata:0x40000000 ] ; + +## IBM XL C/C++ for Linux (little endian) +toolset xlcpp xlC : "-o " : -D + : -Wno-unused -Wno-format + [ opt --release : -s ] + [ opt --debug : -g -qNOOPTimize -qnoinline -pg ] + -I$(--python-include) -I$(--extra-include) + : -L$(--python-lib[1]) -l$(--python-lib[2]) ; + ## Microsoft Visual C++ .NET 7.x toolset vc7 cl : /Fe /Fe /Fd /Fo : -D : /nologo @@ -391,6 +400,15 @@ toolset vc14 cl : /Fe /Fe /Fd /Fo : -D [ opt --debug : /MTd /DEBUG /Z7 /Od /Ob0 /wd4996 ] -I$(--python-include) -I$(--extra-include) : kernel32.lib advapi32.lib user32.lib $(--python-lib[1]) ; +## VMS/OpenVMS DEC C +toolset vmsdecc cc : /OBJECT= : "/DEFINES=(" "," ")" + : /STANDARD=VAXC /PREFIX_LIBRARY_ENTRIES=(ALL_ENTRIES) + [ opt --release : /OPTIMIZE /NODEBUG /WARN=DISABLE=(LONGEXTERN) ] + [ opt --debug : /NOOPTIMIZE /DEBUG ] ; +toolset vmsdecc LINK .link : /EXECUTABLE= : + : /NOMAP + [ opt --release : /NODEBUG ] + [ opt --debug : /DEBUG ] ; # First set the build commands and options according to the # preset toolset. @@ -490,7 +508,7 @@ if --show-locate-target in $(ARGV) ECHO $(locate-target) ; } -# We have some different files for UNIX, and NT. +# We have some different files for UNIX, VMS, and NT. jam.source = command.c compile.c constants.c debug.c debugger.c execcmd.c frames.c function.c glob.c hash.c hcache.c headers.c hdrmacro.c jam.c jambase.c jamgram.c lists.c @@ -503,6 +521,11 @@ if $(OS) = NT { jam.source += execnt.c filent.c pathnt.c ; } +else if $(OS) = VMS +{ + jam.source += execvms.c filevms.c pathvms.c ; + --flags += /INCLUDE=(\""./modules"\") ; +} else { jam.source += execunix.c fileunix.c pathunix.c ; @@ -548,6 +571,10 @@ if ( $(OS) = NT ) && ! 
NT in $(--defs) { --defs += NT ; } +if $(OS) = VMS +{ + --defs += VMS ; +} --defs += YYSTACKSIZE=5000 ; if $(with-python) @@ -576,12 +603,18 @@ if $(OS) = NT { actions piecemeal together existing [DELETE] { if $(UNIX) = true { actions piecemeal together existing [DELETE] { rm -f "$(>)" } } +if $(OS) = VMS { actions piecemeal together existing [DELETE] { + DELETE /NOCONF $(>:WJ=;*, );* +} } if $(OS) = NT { --chmod+w = "attrib -r " ; } if $(UNIX) = true { --chmod+w = "chmod +w " ; } +if $(OS) = VMS { + --chmod+w = "SET FILE/PROT=(W:RWED) " ; +} rule .mkdir { @@ -595,11 +628,14 @@ if $(OS) = NT { actions [MKDIR] { if $(UNIX) = true { actions [MKDIR] { mkdir "$(<)" } } +if $(OS) = VMS { actions [MKDIR] { + CREATE/DIR $(<:WJ=, ) +} } rule .exe { local exe = $(<) ; - if $(OS) = NT || ( $(UNIX) = true && $(OS) = CYGWIN ) { exe = $(exe:S=.exe) ; } + if $(OS) = NT || ( $(UNIX) = true && $(OS) = CYGWIN ) || $(OS) = VMS { exe = $(exe:S=.exe) ; } LOCATE on $(exe) = $(locate-target) ; DEPENDS all : $(exe) ; .mkdir $(locate-target) ; @@ -610,8 +646,10 @@ rule .exe { # Translate any subdir elements into a simple file name. local o = [ MATCH "([^/]+)[/]?(.+)" : $(s) ] ; + o = $(o:J=_) ; o = $(o:S=.o) ; + if $(OS) = VMS { o = $(o:S=.obj) ; } objs += $(o) ; LOCATE on $(o) = $(locate-target) ; DEPENDS $(exe) : $(o) ; @@ -634,16 +672,23 @@ rule .exe } return $(exe) ; } -if ! $(--def[2]) { actions [COMPILE] { + +if $(OS) = VMS { actions [COMPILE] { + $(--cc) $(--bin)$(<:WD=) $(--dir)$(<:D)$(./) $(--out)$(<:W) $(--def[1])$(--defs:J=$(--def[2]))$(--def[3]) $(--flags) $(>:W) $(--libs) +} } +else if ! $(--def[2]) { actions [COMPILE] { "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def)$(--defs)" "$(--flags)" "$(>)" "$(--libs)" } } else { actions [COMPILE] { "$(--cc)" "$(--bin)$(<:D=)" "$(--dir)$(<:D)$(./)" $(--out)$(<) "$(--def[1])$(--defs:J=$(--def[2]))$(--def[3])" "$(--flags)" "$(>)" "$(--libs)" } } -actions [COMPILE.LINK] { +if $(OS) = VMS { actions [COMPILE.LINK] { + $(--link) $(--link-bin)$(<:WD=) $(--link-dir)$(<:WD)$(./) $(--link-out)$(<:W) $(--link-def)$(--link-defs) $(--link-flags) $(--link-libs) $(>:WJ=, ) +} } +else { actions [COMPILE.LINK] { "$(--link)" "$(--link-bin)$(<:D=)" "$(--link-dir)$(<:D)$(./)" "$(--link-out)$(<)" "$(--link-def)$(--link-defs)" "$(--link-flags)" "$(>)" "$(--link-libs)" -} +} } rule .link { @@ -658,6 +703,9 @@ if $(OS) = NT { actions [LINK] { if $(UNIX) = true { actions [LINK] { ln -fs "$(>)" "$(<)" } } +if $(OS) = VMS { actions [LINK] { + COPY/REPLACE $(>:W) $(<:W) +} } rule .copy { @@ -668,9 +716,11 @@ rule .copy } # Will be redefined later. -actions [COPY] -{ -} +if $(OS) = VMS { actions [COPY] { + COPY/REPLACE $(>:W) $(<:W) +} } +else { actions [COPY] { +} } rule .move @@ -685,6 +735,9 @@ if $(OS) = NT { actions [MOVE] { if $(UNIX) = true { actions [MOVE] { mv -f "$(>)" "$(<)" } } +if $(OS) = VMS { actions [MOVE] { + RENAME /NOCONF $(>:W) $(<:W) +} } # Generate the grammar tokens table, and the real yacc grammar. rule .yyacc @@ -759,6 +812,13 @@ if $(UNIX) = true { actions [YACC] { exit 1 fi } } +if $(OS) = VMS { actions [YACC] { + IF $(yacc) $(>) + THEN + RENAME /NOCONF y_tab$(<[1]:S) $(<[1]:W) + RENAME /NOCONF y_tab$(<[2]:S) $(<[2]:W) + ENDIF +} } if $(grammar) && ! 
$(yacc) { EXIT Could not find the 'yacc' tool, and therefore can not build the @@ -849,7 +909,7 @@ dist.source = dist.source = $(dist.source:D=) $(dist.license[1]) $(dist.docs) - build.jam build.bat build.sh + build.jam build.bat build.sh build_vms.com Jambase jamgram.y jamgram.yy [ .path modules set.c ] diff --git a/src/engine/build.sh b/src/engine/build.sh index 6dbc70633..1470b480e 100755 --- a/src/engine/build.sh +++ b/src/engine/build.sh @@ -7,6 +7,7 @@ # Reset the toolset. BOOST_JAM_TOOLSET= +BOOST_JAM_OS= # Run a command, and echo before doing so. Also checks the exit status and quits # if there was an error. @@ -31,7 +32,7 @@ error_exit () echo "###" echo "### Toolsets supported by this script are:" echo "### acc, como, darwin, gcc, intel-darwin, intel-linux, kcc, kylix," - echo "### mipspro, mingw(msys), pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp" + echo "### mipspro, pathscale, pgi, qcc, sun, sunpro, tru64cxx, vacpp" echo "###" echo "### A special toolset; cc, is available which is used as a fallback" echo "### when a more specific toolset is not found and the cc command is" @@ -62,14 +63,20 @@ test_uname () # Try and guess the toolset to bootstrap the build with... Guess_Toolset () { - if test -r /mingw/bin/gcc ; then - BOOST_JAM_TOOLSET=mingw - BOOST_JAM_TOOLSET_ROOT=/mingw/ - elif test_uname Darwin ; then BOOST_JAM_TOOLSET=darwin + if test_uname Darwin ; then BOOST_JAM_TOOLSET=darwin elif test_uname IRIX ; then BOOST_JAM_TOOLSET=mipspro elif test_uname IRIX64 ; then BOOST_JAM_TOOLSET=mipspro elif test_uname OSF1 ; then BOOST_JAM_TOOLSET=tru64cxx elif test_uname QNX && test_path qcc ; then BOOST_JAM_TOOLSET=qcc + elif test_uname Linux && test_path xlc; then + if /usr/bin/lscpu | grep Byte | grep Little > /dev/null 2>&1 ; then + # Little endian linux + BOOST_JAM_TOOLSET=xlcpp + else + #Big endian linux + BOOST_JAM_TOOLSET=vacpp + fi + elif test_uname AIX && test_path xlc; then BOOST_JAM_TOOLSET=vacpp elif test_path gcc ; then BOOST_JAM_TOOLSET=gcc elif test_path icc ; then BOOST_JAM_TOOLSET=intel-linux elif test -r /opt/intel/cc/9.0/bin/iccvars.sh ; then @@ -89,7 +96,6 @@ Guess_Toolset () BOOST_JAM_TOOLSET_ROOT=/opt/intel/compiler50/ia32/ elif test_path pgcc ; then BOOST_JAM_TOOLSET=pgi elif test_path pathcc ; then BOOST_JAM_TOOLSET=pathscale - elif test_path xlc ; then BOOST_JAM_TOOLSET=vacpp elif test_path como ; then BOOST_JAM_TOOLSET=como elif test_path KCC ; then BOOST_JAM_TOOLSET=kcc elif test_path bc++ ; then BOOST_JAM_TOOLSET=kylix @@ -122,15 +128,28 @@ BOOST_JAM_OPT_JAM="-o bootstrap/jam0" BOOST_JAM_OPT_MKJAMBASE="-o bootstrap/mkjambase0" BOOST_JAM_OPT_YYACC="-o bootstrap/yyacc0" case $BOOST_JAM_TOOLSET in - mingw) - if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/gcc ; then - export PATH=${BOOST_JAM_TOOLSET_ROOT}bin:$PATH - fi - BOOST_JAM_CC="gcc -DNT" - ;; gcc) - BOOST_JAM_CC=gcc + # Check whether it's MinGW GCC, which has Windows headers and none of POSIX ones. + machine=$(gcc -dumpmachine 2>/dev/null) + if [ $? -ne 0 ]; then + echo "BOOST_JAM_TOOLSET is gcc, but the 'gcc' command cannot be executed." + echo "Make sure 'gcc' is in PATH, or use a different toolset." + exit 1 + fi + case $machine in + *mingw*) + # MinGW insists that its bin directory be in PATH. 
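This build.sh hunk drops the dedicated mingw toolset and instead asks the compiler itself: gcc -dumpmachine prints the compiler's target triplet (for MinGW builds it contains the substring mingw), and in that case, continuing just below, the script compiles with -DNT and sets BOOST_JAM_OS=NT so that the NT engine sources (execnt.c, filent.c, pathnt.c) are selected later in the script. The same probe, sketched in Python purely for illustration (the helper name is made up and this is not part of the build scripts):

    import subprocess

    # Hypothetical illustration of the MinGW check performed by build.sh:
    # read gcc's target triplet and report whether it names a MinGW toolchain,
    # which needs the NT engine sources and the -DNT define.
    def gcc_is_mingw():
        try:
            triplet = subprocess.check_output(
                ['gcc', '-dumpmachine'], text=True).strip()
        except (OSError, subprocess.CalledProcessError):
            return None  # gcc not found or not runnable
        return 'mingw' in triplet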
+ if test -r ${BOOST_JAM_TOOLSET_ROOT}bin/gcc ; then + export PATH=${BOOST_JAM_TOOLSET_ROOT}bin:$PATH + fi + BOOST_JAM_CC="gcc -DNT" + BOOST_JAM_OS="NT" + ;; + + *) + BOOST_JAM_CC=gcc + esac ;; darwin) @@ -142,11 +161,11 @@ case $BOOST_JAM_TOOLSET in ;; intel-linux) - which icc >/dev/null 2>&1 + test_path icc >/dev/null 2>&1 if test $? ; then - BOOST_JAM_CC=$(which icc) + BOOST_JAM_CC=`test_path icc` echo "Found $BOOST_JAM_CC in environment" - BOOST_JAM_TOOLSET_ROOT=$(echo $BOOST_JAM_CC | sed -e 's/bin.*\/icc//') + BOOST_JAM_TOOLSET_ROOT=`echo $BOOST_JAM_CC | sed -e 's/bin.*\/icc//'` # probably the most widespread ARCH=intel64 else @@ -185,6 +204,10 @@ case $BOOST_JAM_TOOLSET in vacpp) BOOST_JAM_CC=xlc ;; + + xlcpp) + BOOST_JAM_CC=xlc + ;; como) BOOST_JAM_CC="como --c" @@ -265,8 +288,8 @@ BJAM_SOURCES="\ builtins.c class.c cwd.c native.c md5.c w32_getreg.c modules/set.c\ modules/path.c modules/regex.c modules/property-set.c modules/sequence.c\ modules/order.c" -case $BOOST_JAM_TOOLSET in - mingw) +case $BOOST_JAM_OS in + NT) BJAM_SOURCES="${BJAM_SOURCES} execnt.c filent.c pathnt.c" ;; diff --git a/src/engine/build_vms.com b/src/engine/build_vms.com new file mode 100644 index 000000000..6f73512d0 --- /dev/null +++ b/src/engine/build_vms.com @@ -0,0 +1,153 @@ +$ ! Copyright 2002-2003 Rene Rivera, Johan Nilsson. +$ ! +$ ! 8-APR-2004 Boris Gubenko +$ ! Miscellaneous improvements. +$ ! +$ ! 20-JAN-2015 Artur Shepilko +$ ! Adapt for jam 3.1.19 +$ ! +$ ! Distributed under the Boost Software License, Version 1.0. +$ ! (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +$ ! +$ ! bootstrap build script for Jam +$ ! +$ THIS_FACILITY = "BUILDJAM" +$ +$ verify = f$trnlnm("VERIFY_''THIS_FACILITY'") +$ save_verify = f$verify(verify) +$ +$ SAY := WRITE SYS$OUTPUT +$ ! +$ ON WARNING THEN CONTINUE +$ ON ERROR THEN GOTO EXIT +$ +$ BOOST_JAM_TOOLSET = "vmsdecc" +$ BOOST_JAM_CC = "CC" +$ BJAM_UPDATE = "" +$ +$ ARGS = F$EDIT("''p1' ''p2' ''p3' ''p4'","TRIM,LOWERCASE") +$ ARGS_LEN = F$LENGTH(ARGS) +$ +$ IF F$LOCATE("--update", ARGS) .NE. F$LENGTH(ARGS) THEN BJAM_UPDATE = "update" +$ IF BJAM_UPDATE .EQS. "update" - + .AND. F$SEARCH("[.bootstrap_vms]jam0.exe") .EQS. "" THEN BJAM_UPDATE = "" +$ +$ IF BJAM_UPDATE .NES. "update" +$ THEN +$ GOSUB CLEAN +$ +$ SAY "I|Creating bootstrap directory..." +$ CREATE /DIR [.bootstrap_vms] +$ +$ !------------------ +$ ! NOTE: Assume jamgram and jambase have been generated (true for fresh release). +$ ! Otherwise these need to be re-generated manually. +$ !------------------ +$ +$ SAY "I|Building bootstrap jam..." +$ ! 
+$ CC_FLAGS = "/DEFINE=VMS /STANDARD=VAXC " + - + "/PREFIX_LIBRARY_ENTRIES=(ALL_ENTRIES) " + - + "/WARNING=DISABLE=(LONGEXTERN)" + - + "/OBJ=[.bootstrap_vms] " +$ +$ CC_INCLUDE="" +$ +$ SAY "I|Using compile flags: ", CC_FLAGS +$ +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE command.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE compile.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE constants.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE debug.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execcmd.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE frames.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE function.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE glob.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hash.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE hdrmacro.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE headers.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jam.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jambase.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE jamgram.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE lists.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE make1.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE object.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE option.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE output.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE parse.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathsys.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE regexp.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE rules.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE scan.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE search.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE subst.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE timestamp.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE variable.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE modules.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE strings.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filesys.c +$ +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE execvms.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE pathvms.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE filevms.c +$ +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE builtins.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE class.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE cwd.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE native.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE md5.c +$ +$ CC_INCLUDE = "/INCLUDE=(""./modules"")" +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]set.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]path.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]regex.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]property-set.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]sequence.c +$ 'BOOST_JAM_CC' 'CC_FLAGS 'CC_INCLUDE [.modules]order.c +$ +$ LIB /CREATE [.bootstrap_vms]jam0.olb [.bootstrap_vms]*.obj +$ LINK /EXEC=[.bootstrap_vms]jam0.exe - + [.bootstrap_vms]jam0.olb/INCLUDE=JAM/LIB +$ +$ IF F$SEARCH("[.bootstrap_vms]*.obj") .NES. "" THEN - + DELETE /NOCONF /NOLOG [.bootstrap_vms]*.obj;*, *.olb;* +$ ENDIF +$ +$ IF F$SEARCH("[.bootstrap_vms]jam0.exe") .NES. "" +$ THEN +$ IF BJAM_UPDATE .NES. "update" +$ THEN +$ SAY "I|Cleaning previous build..." +$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS' clean +$ ENDIF +$ +$ SAY "I|Building Boost.Jam..." +$ MCR [.bootstrap_vms]jam0.exe -f build.jam --toolset='BOOST_JAM_TOOLSET' 'ARGS' +$ ENDIF +$ +$ +$EXIT: +$ sts = $STATUS +$ exit 'sts' + (0 * f$verify(save_verify)) + + +$CLEAN: !GOSUB +$ ! +$ IF F$SEARCH("[.bootstrap_vms]*.*") .NES. "" +$ THEN +$ SAY "I|Cleaning previous bootstrap files..." +$ ! 
+$ SET FILE /PROT=(W:RWED) [.bootstrap_vms]*.*;* +$ DELETE /NOCONF /NOLOG [.bootstrap_vms]*.*;* +$ ENDIF +$ ! +$ IF F$SEARCH("bootstrap_vms.dir") .NES. "" +$ THEN +$ SAY "I|Removing previous bootstrap directory..." +$ ! +$ SET FILE /PROT=(W:RWED) bootstrap_vms.dir +$ DELETE /NOCONF /NOLOG bootstrap_vms.dir; +$ ENDIF +$ ! +$ RETURN diff --git a/src/engine/builtins.c b/src/engine/builtins.c index 7c02d0602..048b6b718 100644 --- a/src/engine/builtins.c +++ b/src/engine/builtins.c @@ -27,6 +27,7 @@ #include "subst.h" #include "timestamp.h" #include "variable.h" +#include "output.h" #include @@ -38,11 +39,25 @@ */ #include #endif + +/* With VC8 (VS2005) these are not defined: + * FSCTL_GET_REPARSE_POINT (expects WINVER >= 0x0500 _WIN32_WINNT >= 0x0500 ) + * IO_REPARSE_TAG_SYMLINK (is part of a separate Driver SDK) + * So define them explicitily to their expected values. + */ +#ifndef FSCTL_GET_REPARSE_POINT +# define FSCTL_GET_REPARSE_POINT 0x000900a8 #endif +#ifndef IO_REPARSE_TAG_SYMLINK +# define IO_REPARSE_TAG_SYMLINK (0xA000000CL) +#endif +#endif /* OS_NT */ #if defined(USE_EXECUNIX) # include # include +#elif defined(OS_VMS) +# include #else /* * NT does not have wait() and associated macros and uses the system() return @@ -441,14 +456,22 @@ void load_builtins() const char * args [] = { "path", 0 }; bind_builtin( "READLINK", builtin_readlink, 0, args ); } - -#ifdef JAM_DEBUGGER { - const char * args [] = { "list", "*", 0 }; - bind_builtin( "__DEBUG_PRINT_HELPER__", builtin_debug_print_helper, 0, args ); + char const * args[] = { "archives", "*", + ":", "member-patterns", "*", + ":", "case-insensitive", "?", + ":", "symbol-patterns", "*", 0 }; + bind_builtin( "GLOB_ARCHIVE", builtin_glob_archive, 0, args ); } +#ifdef JAM_DEBUGGER + + { + const char * args[] = { "list", "*", 0 }; + bind_builtin("__DEBUG_PRINT_HELPER__", builtin_debug_print_helper, 0, args); + } + #endif /* Initialize builtin modules. */ @@ -587,8 +610,8 @@ LIST * builtin_rebuilds( FRAME * frame, int flags ) LIST * builtin_echo( FRAME * frame, int flags ) { list_print( lol_get( frame->args, 0 ) ); - printf( "\n" ); - fflush( stdout ); + out_printf( "\n" ); + out_flush(); return L0; } @@ -603,9 +626,23 @@ LIST * builtin_exit( FRAME * frame, int flags ) { LIST * const code = lol_get( frame->args, 1 ); list_print( lol_get( frame->args, 0 ) ); - printf( "\n" ); + out_printf( "\n" ); if ( !list_empty( code ) ) - exit( atoi( object_str( list_front( code ) ) ) ); + { + int status = atoi( object_str( list_front( code ) ) ); +#ifdef OS_VMS + switch( status ) + { + case 0: + status = EXITOK; + break; + case 1: + status = EXITBAD; + break; + } +#endif + exit( status ); + } else exit( EXITBAD ); /* yeech */ return L0; @@ -736,7 +773,7 @@ LIST * builtin_glob( FRAME * frame, int flags ) globbing.patterns = r; globbing.case_insensitive = -# if defined( OS_NT ) || defined( OS_CYGWIN ) +# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS ) l; /* Always case-insensitive if any files can be found. */ # else lol_get( frame->args, 2 ); @@ -785,7 +822,7 @@ LIST * glob1( OBJECT * dirname, OBJECT * pattern ) globbing.patterns = plist; globbing.case_insensitive -# if defined( OS_NT ) || defined( OS_CYGWIN ) +# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS ) = plist; /* always case-insensitive if any files can be found */ # else = L0; @@ -1024,7 +1061,7 @@ LIST * builtin_hdrmacro( FRAME * frame, int flags ) /* Scan file for header filename macro definitions. 
*/ if ( DEBUG_HEADER ) - printf( "scanning '%s' for header file macro definitions\n", + out_printf( "scanning '%s' for header file macro definitions\n", object_str( list_item( iter ) ) ); macro_headers( t ); @@ -1121,16 +1158,16 @@ void unknown_rule( FRAME * frame, char const * key, module_t * module, { backtrace_line( frame->prev ); if ( key ) - printf("%s error", key); + out_printf("%s error", key); else - printf("ERROR"); - printf( ": rule \"%s\" unknown in ", object_str( rule_name ) ); + out_printf("ERROR"); + out_printf( ": rule \"%s\" unknown in ", object_str( rule_name ) ); if ( module->name ) - printf( "module \"%s\".\n", object_str( module->name ) ); + out_printf( "module \"%s\".\n", object_str( module->name ) ); else - printf( "root module.\n" ); + out_printf( "root module.\n" ); backtrace( frame->prev ); - exit( 1 ); + exit( EXITBAD ); } @@ -1202,15 +1239,15 @@ LIST * builtin_import( FRAME * frame, int flags ) if ( source_iter != source_end || target_iter != target_end ) { backtrace_line( frame->prev ); - printf( "import error: length of source and target rule name lists " + out_printf( "import error: length of source and target rule name lists " "don't match!\n" ); - printf( " source: " ); + out_printf( " source: " ); list_print( source_rules ); - printf( "\n target: " ); + out_printf( "\n target: " ); list_print( target_rules ); - printf( "\n" ); + out_printf( "\n" ); backtrace( frame->prev ); - exit( 1 ); + exit( EXITBAD ); } return L0; @@ -1281,9 +1318,9 @@ void print_source_line( FRAME * frame ) int line; get_source_line( frame, &file, &line ); if ( line < 0 ) - printf( "(builtin):" ); + out_printf( "(builtin):" ); else - printf( "%s:%d:", file, line ); + out_printf( "%s:%d:", file, line ); } @@ -1296,12 +1333,12 @@ void backtrace_line( FRAME * frame ) { if ( frame == 0 ) { - printf( "(no frame):" ); + out_printf( "(no frame):" ); } else { print_source_line( frame ); - printf( " in %s\n", frame->rulename ); + out_printf( " in %s\n", frame->rulename ); } } @@ -1479,8 +1516,8 @@ LIST * builtin_update_now( FRAME * frame, int flags ) /* Flush whatever stdio might have buffered, while descriptions 0 and 1 * still refer to the log file. 
*/ - fflush( stdout ); - fflush( stderr ); + out_flush( ); + err_flush( ); dup2( original_stdout, 0 ); dup2( original_stderr, 1 ); close( original_stdout ); @@ -1679,10 +1716,10 @@ LIST * builtin_native_rule( FRAME * frame, int flags ) else { backtrace_line( frame->prev ); - printf( "error: no native rule \"%s\" defined in module \"%s.\"\n", + out_printf( "error: no native rule \"%s\" defined in module \"%s.\"\n", object_str( list_front( rule_name ) ), object_str( module->name ) ); backtrace( frame->prev ); - exit( 1 ); + exit( EXITBAD ); } return L0; } @@ -2034,14 +2071,14 @@ LIST * builtin_python_import_rule( FRAME * frame, int flags ) { if ( PyErr_Occurred() ) PyErr_Print(); - fprintf( stderr, "Cannot find function \"%s\"\n", python_function ); + err_printf( "Cannot find function \"%s\"\n", python_function ); } Py_DECREF( pModule ); } else { PyErr_Print(); - fprintf( stderr, "Failed to load \"%s\"\n", python_module ); + err_printf( "Failed to load \"%s\"\n", python_module ); } return L0; @@ -2076,6 +2113,73 @@ void lol_build( LOL * lol, char const * * elements ) #ifdef HAVE_PYTHON +static LIST *jam_list_from_string(PyObject *a) +{ + return list_new( object_new( PyString_AsString( a ) ) ); +} + +static LIST *jam_list_from_sequence(PyObject *a) +{ + LIST * l = 0; + + int i = 0; + int s = PySequence_Size( a ); + + for ( ; i < s; ++i ) + { + /* PySequence_GetItem returns new reference. */ + PyObject * e = PySequence_GetItem( a, i ); + char * s = PyString_AsString( e ); + if ( !s ) + { + /* try to get the repr() on the object */ + PyObject *repr = PyObject_Repr(e); + if (repr) + { + const char *str = PyString_AsString(repr); + PyErr_Format(PyExc_TypeError, "expecting type got %s", str); + } + /* fall back to a dumb error */ + else + { + PyErr_BadArgument(); + } + return NULL; + } + l = list_push_back( l, object_new( s ) ); + Py_DECREF( e ); + } + + return l; +} + +static void make_jam_arguments_from_python(FRAME* inner, PyObject *args) +{ + int i; + int size; + + /* Build up the list of arg lists. */ + frame_init( inner ); + inner->prev = 0; + inner->prev_user = 0; + inner->module = bindmodule( constant_python_interface ); + + size = PyTuple_Size( args ); + for (i = 0 ; i < size; ++i) + { + PyObject * a = PyTuple_GetItem( args, i ); + if ( PyString_Check( a ) ) + { + lol_add( inner->args, jam_list_from_string(a) ); + } + else if ( PySequence_Check( a ) ) + { + lol_add( inner->args, jam_list_from_sequence(a) ); + } + } +} + + /* * Calls the bjam rule specified by name passed in 'args'. The name is looked up * in the context of bjam's 'python_interface' module. Returns the list of @@ -2088,50 +2192,18 @@ PyObject * bjam_call( PyObject * self, PyObject * args ) LIST * result; PARSE * p; OBJECT * rulename; - - /* Build up the list of arg lists. */ - frame_init( inner ); - inner->prev = 0; - inner->prev_user = 0; - inner->module = bindmodule( constant_python_interface ); - - /* Extract the rule name and arguments from 'args'. */ + PyObject *args_proper; /* PyTuple_GetItem returns borrowed reference. 
*/ rulename = object_new( PyString_AsString( PyTuple_GetItem( args, 0 ) ) ); + + args_proper = PyTuple_GetSlice(args, 1, PyTuple_Size(args)); + make_jam_arguments_from_python (inner, args_proper); + if ( PyErr_Occurred() ) { - int i = 1; - int size = PyTuple_Size( args ); - for ( ; i < size; ++i ) - { - PyObject * a = PyTuple_GetItem( args, i ); - if ( PyString_Check( a ) ) - { - lol_add( inner->args, list_new( object_new( - PyString_AsString( a ) ) ) ); - } - else if ( PySequence_Check( a ) ) - { - LIST * l = 0; - int s = PySequence_Size( a ); - int i = 0; - for ( ; i < s; ++i ) - { - /* PySequence_GetItem returns new reference. */ - PyObject * e = PySequence_GetItem( a, i ); - char * s = PyString_AsString( e ); - if ( !s ) - { - printf( "Invalid parameter type passed from Python\n" ); - exit( 1 ); - } - l = list_push_back( l, object_new( s ) ); - Py_DECREF( e ); - } - lol_add( inner->args, l ); - } - } + return NULL; } + Py_DECREF(args_proper); result = evaluate_rule( bindrule( rulename, inner->module), rulename, inner ); object_free( rulename ); @@ -2467,6 +2539,9 @@ LIST * builtin_shell( FRAME * frame, int flags ) rtrim( buffer ); string_append( &s, buffer ); } + + /* Explicit EOF check for systems with broken fread */ + if ( feof( p ) ) break; } exit_status = pclose( p ); @@ -2482,6 +2557,11 @@ LIST * builtin_shell( FRAME * frame, int flags ) exit_status = WEXITSTATUS( exit_status ); else exit_status = -1; + +#ifdef OS_VMS + /* Harmonize VMS success status with POSIX */ + if ( exit_status == 1 ) exit_status = EXIT_SUCCESS; +#endif sprintf( buffer, "%d", exit_status ); result = list_push_back( result, object_new( buffer ) ); } @@ -2497,3 +2577,147 @@ LIST * builtin_shell( FRAME * frame, int flags ) } #endif /* #ifdef HAVE_POPEN */ + + +/* + * builtin_glob_archive() - GLOB_ARCHIVE rule + */ + +struct globbing2 +{ + LIST * patterns[ 2 ]; + LIST * results; + LIST * case_insensitive; +}; + + +static void builtin_glob_archive_back( void * closure, OBJECT * member, + LIST * symbols, int status, timestamp const * const time ) +{ + PROFILE_ENTER( BUILTIN_GLOB_ARCHIVE_BACK ); + + struct globbing2 * const globbing = (struct globbing2 *)closure; + PATHNAME f; + string buf[ 1 ]; + LISTITER iter; + LISTITER end; + LISTITER iter_symbols; + LISTITER end_symbols; + int matched = 0; + + /* Match member name. + */ + path_parse( object_str( member ), &f ); + + if ( !strcmp( f.f_member.ptr, "" ) ) + { + PROFILE_EXIT( BUILTIN_GLOB_ARCHIVE_BACK ); + return; + } + + string_new( buf ); + string_append_range( buf, f.f_member.ptr, f.f_member.ptr + f.f_member.len ); + + if ( globbing->case_insensitive ) + downcase_inplace( buf->value ); + + /* Glob with member patterns. If not matched, then match symbols. + */ + matched = 0; + iter = list_begin( globbing->patterns[ 0 ] ); + end = list_end( globbing->patterns[ 0 ] ); + for ( ; !matched && iter != end; + iter = list_next( iter ) ) + { + const char * pattern = object_str( list_item( iter ) ); + int match_exact = ( !has_wildcards( pattern ) ); + matched = ( match_exact ? + ( !strcmp( pattern, buf->value ) ) : + ( !glob( pattern, buf->value ) ) ); + } + + + /* Glob with symbol patterns, if requested. 
+ */ + iter = list_begin( globbing->patterns[ 1 ] ); + end = list_end( globbing->patterns[ 1 ] ); + + if ( iter != end ) matched = 0; + + for ( ; !matched && iter != end; + iter = list_next( iter ) ) + { + const char * pattern = object_str( list_item( iter ) ); + int match_exact = ( !has_wildcards( pattern ) ); + + iter_symbols = list_begin( symbols ); + end_symbols = list_end( symbols ); + + for ( ; !matched && iter_symbols != end_symbols; + iter_symbols = list_next( iter_symbols ) ) + { + const char * symbol = object_str( list_item( iter_symbols ) ); + + string_copy( buf, symbol ); + if ( globbing->case_insensitive ) + downcase_inplace( buf->value ); + + matched = ( match_exact ? + ( !strcmp( pattern, buf->value ) ) : + ( !glob( pattern, buf->value ) ) ); + } + } + + if ( matched ) + { + globbing->results = list_push_back( globbing->results, + object_copy( member ) ); + } + + string_free( buf ); + + PROFILE_EXIT( BUILTIN_GLOB_ARCHIVE_BACK ); +} + + +LIST * builtin_glob_archive( FRAME * frame, int flags ) +{ + LIST * const l = lol_get( frame->args, 0 ); + LIST * const r1 = lol_get( frame->args, 1 ); + LIST * const r2 = lol_get( frame->args, 2 ); + LIST * const r3 = lol_get( frame->args, 3 ); + + LISTITER iter; + LISTITER end; + struct globbing2 globbing; + + globbing.results = L0; + globbing.patterns[ 0 ] = r1; + globbing.patterns[ 1 ] = r3; + + globbing.case_insensitive = +# if defined( OS_NT ) || defined( OS_CYGWIN ) || defined( OS_VMS ) + l; /* Always case-insensitive. */ +# else + r2; +# endif + + if ( globbing.case_insensitive ) + { + globbing.patterns[ 0 ] = downcase_list( globbing.patterns[ 0 ] ); + globbing.patterns[ 1 ] = downcase_list( globbing.patterns[ 1 ] ); + } + + iter = list_begin( l ); + end = list_end( l ); + for ( ; iter != end; iter = list_next( iter ) ) + file_archivescan( list_item( iter ), builtin_glob_archive_back, &globbing ); + + if ( globbing.case_insensitive ) + { + list_free( globbing.patterns[ 0 ] ); + list_free( globbing.patterns[ 1 ] ); + } + + return globbing.results; +} diff --git a/src/engine/builtins.h b/src/engine/builtins.h index 89c9c4741..617ad18c6 100644 --- a/src/engine/builtins.h +++ b/src/engine/builtins.h @@ -64,6 +64,7 @@ LIST *builtin_precious( FRAME * frame, int flags ); LIST *builtin_self_path( FRAME * frame, int flags ); LIST *builtin_makedir( FRAME * frame, int flags ); LIST *builtin_readlink( FRAME * frame, int flags ); +LIST *builtin_glob_archive( FRAME * frame, int flags ); LIST *builtin_debug_print_helper( FRAME * frame, int flags ); void backtrace( FRAME *frame ); diff --git a/src/engine/class.c b/src/engine/class.c index a4abfaac8..1a06deb3e 100644 --- a/src/engine/class.c +++ b/src/engine/class.c @@ -15,6 +15,7 @@ #include "rules.h" #include "strings.h" #include "variable.h" +#include "output.h" #include #include @@ -31,7 +32,7 @@ static void check_defined( LIST * class_names ) { if ( !hash_find( classes, list_item( iter ) ) ) { - printf( "Class %s is not defined\n", object_str( list_item( iter ) ) + out_printf( "Class %s is not defined\n", object_str( list_item( iter ) ) ); abort(); } @@ -141,7 +142,7 @@ OBJECT * make_class_module( LIST * xname, LIST * bases, FRAME * frame ) } else { - printf( "Class %s already defined\n", object_str( list_front( xname ) ) + out_printf( "Class %s already defined\n", object_str( list_front( xname ) ) ); abort(); } diff --git a/src/engine/compile.c b/src/engine/compile.c index a690b9fa5..6adb83fa2 100644 --- a/src/engine/compile.c +++ b/src/engine/compile.c @@ -35,6 +35,7 @@ #include "search.h" 
#include "strings.h" #include "variable.h" +#include "output.h" #include #include @@ -78,7 +79,7 @@ LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * frame ) debug_compile( 1, buf, frame ); lol_print( frame->args ); - printf( "\n" ); + out_printf( "\n" ); } if ( rule->procedure && rule->module != prev_module ) @@ -218,15 +219,15 @@ static void debug_compile( int which, char const * s, FRAME * frame ) i = ( level + 1 ) * 2; while ( i > 35 ) { - fputs( indent, stdout ); + out_puts( indent ); i -= 35; } - printf( "%*.*s ", i, i, indent ); + out_printf( "%*.*s ", i, i, indent ); } if ( s ) - printf( "%s ", s ); + out_printf( "%s ", s ); level += which; } diff --git a/src/engine/constants.c b/src/engine/constants.c index 891d32294..ce4e3d7e4 100644 --- a/src/engine/constants.c +++ b/src/engine/constants.c @@ -72,6 +72,8 @@ void constants_init( void ) constant_python_interface = object_new( "python_interface" ); constant_extra_pythonpath = object_new( "EXTRA_PYTHONPATH" ); constant_MAIN_PYTHON = object_new( "MAIN_PYTHON" ); + constant_BUILTIN_GLOB_ARCHIVE_BACK= object_new( "BUILTIN_GLOB_ARCHIVE_BACK" ); + constant_FILE_ARCHIVESCAN = object_new( "FILE_ARCHIVESCAN" ); } void constants_done( void ) @@ -129,6 +131,8 @@ void constants_done( void ) object_free( constant_python_interface ); object_free( constant_extra_pythonpath ); object_free( constant_MAIN_PYTHON ); + object_free( constant_FILE_ARCHIVESCAN ); + object_free( constant_BUILTIN_GLOB_ARCHIVE_BACK ); } OBJECT * constant_empty; @@ -184,3 +188,5 @@ OBJECT * constant_python; OBJECT * constant_python_interface; OBJECT * constant_extra_pythonpath; OBJECT * constant_MAIN_PYTHON; +OBJECT * constant_FILE_ARCHIVESCAN; +OBJECT * constant_BUILTIN_GLOB_ARCHIVE_BACK; diff --git a/src/engine/constants.h b/src/engine/constants.h index 60d7073b9..994275719 100644 --- a/src/engine/constants.h +++ b/src/engine/constants.h @@ -69,5 +69,7 @@ extern OBJECT * constant_python; /* "__python__" */ extern OBJECT * constant_python_interface; /* "python_interface" */ extern OBJECT * constant_extra_pythonpath; /* "EXTRA_PYTHONPATH" */ extern OBJECT * constant_MAIN_PYTHON; /* "MAIN_PYTHON" */ +extern OBJECT * constant_FILE_ARCHIVESCAN; /* "FILE_ARCHIVESCAN" */ +extern OBJECT * constant_BUILTIN_GLOB_ARCHIVE_BACK; /* "BUILTIN_GLOB_ARCHIVE_BACK" */ #endif diff --git a/src/engine/cwd.c b/src/engine/cwd.c index 7ebe97045..b277262f6 100644 --- a/src/engine/cwd.c +++ b/src/engine/cwd.c @@ -46,7 +46,12 @@ void cwd_init( void ) do { char * const buffer = BJAM_MALLOC_RAW( buffer_size ); +#ifdef OS_VMS + /* cwd in POSIX-format */ + cwd_buffer = getcwd( buffer, buffer_size, 0 ); +#else cwd_buffer = getcwd( buffer, buffer_size ); +#endif error = errno; if ( cwd_buffer ) { diff --git a/src/engine/debug.c b/src/engine/debug.c index 2a656551b..b4601066f 100644 --- a/src/engine/debug.c +++ b/src/engine/debug.c @@ -7,7 +7,7 @@ #include "jam.h" #include "debug.h" - +#include "output.h" #include "hash.h" @@ -125,7 +125,7 @@ static void dump_profile_entry( void * p_, void * ignored ) profile_total.cumulative += p->net; profile_total.memory += p->memory; } - printf( "%10ld %12.6f %12.6f %12.8f %10ld %10ld %s\n", p->num_entries, + out_printf( "%10ld %12.6f %12.6f %12.8f %10ld %10ld %s\n", p->num_entries, cumulative, net, q, p->memory, mem_each, object_str( p->name ) ); } @@ -134,7 +134,7 @@ void profile_dump() { if ( profile_hash ) { - printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", "--gross--", + out_printf( "%10s %12s %12s %12s %10s %10s %s\n", "--count--", 
"--gross--", "--net--", "--each--", "--mem--", "--each--", "--name--" ); hashenumerate( profile_hash, dump_profile_entry, 0 ); profile_other.name = constant_other; diff --git a/src/engine/execcmd.c b/src/engine/execcmd.c index f751cbff3..193c90611 100644 --- a/src/engine/execcmd.c +++ b/src/engine/execcmd.c @@ -117,5 +117,5 @@ int interrupted( void ) void onintr( int disp ) { ++intr; - printf( "...interrupted\n" ); + out_printf( "...interrupted\n" ); } diff --git a/src/engine/execnt.c b/src/engine/execnt.c index d75aab0ae..7e350d902 100644 --- a/src/engine/execnt.c +++ b/src/engine/execnt.c @@ -37,6 +37,7 @@ */ #include "jam.h" +#include "output.h" #ifdef USE_EXECNT #include "execcmd.h" @@ -290,12 +291,12 @@ void exec_cmd if ( DEBUG_EXECCMD ) if ( is_raw_cmd ) - printf( "Executing raw command directly\n" ); + out_printf( "Executing raw command directly\n" ); else { - printf( "Executing using a command file and the shell: " ); + out_printf( "Executing using a command file and the shell: " ); list_print( shell ); - printf( "\n" ); + out_printf( "\n" ); } /* If we are running a raw command directly - trim its leading whitespaces @@ -480,7 +481,7 @@ static void invoke_cmd( char const * const command, int const slot ) si.hStdInput = GetStdHandle( STD_INPUT_HANDLE ); if ( DEBUG_EXECCMD ) - printf( "Command string for CreateProcessA(): '%s'\n", command ); + out_printf( "Command string for CreateProcessA(): '%s'\n", command ); /* Run the command by creating a sub-process for it. */ if ( !CreateProcessA( @@ -1289,7 +1290,7 @@ static char const * prepare_command_file( string const * command, int slot ) FILE * const f = open_command_file( slot ); if ( !f ) { - printf( "failed to write command file!\n" ); + err_printf( "failed to write command file!\n" ); exit( EXITBAD ); } fputs( command->value, f ); @@ -1308,7 +1309,7 @@ static int get_free_cmdtab_slot() for ( slot = 0; slot < MAXJOBS; ++slot ) if ( !cmdtab[ slot ].pi.hProcess ) return slot; - printf( "no slots for child!\n" ); + err_printf( "no slots for child!\n" ); exit( EXITBAD ); } diff --git a/src/engine/execunix.c b/src/engine/execunix.c index 297c00377..8ee4d9dbf 100644 --- a/src/engine/execunix.c +++ b/src/engine/execunix.c @@ -164,11 +164,11 @@ void exec_cmd if ( DEBUG_EXECCMD ) { int i; - printf( "Using shell: " ); + out_printf( "Using shell: " ); list_print( shell ); - printf( "\n" ); + out_printf( "\n" ); for ( i = 0; argv[ i ]; ++i ) - printf( " argv[%d] = '%s'\n", i, argv[ i ] ); + out_printf( " argv[%d] = '%s'\n", i, argv[ i ] ); } /* Create pipes for collecting child output. */ @@ -528,7 +528,7 @@ void exec_wait() break; if ( pid != cmdtab[ i ].pid ) { - printf( "unknown pid %d with errno = %d\n", pid, errno ); + err_printf( "unknown pid %d with errno = %d\n", pid, errno ); exit( EXITBAD ); } @@ -592,7 +592,7 @@ static int get_free_cmdtab_slot() for ( slot = 0; slot < MAXJOBS; ++slot ) if ( !cmdtab[ slot ].pid ) return slot; - printf( "no slots for child!\n" ); + err_printf( "no slots for child!\n" ); exit( EXITBAD ); } diff --git a/src/engine/execvms.c b/src/engine/execvms.c new file mode 100644 index 000000000..15e9d2807 --- /dev/null +++ b/src/engine/execvms.c @@ -0,0 +1,418 @@ +/* + * Copyright 1993, 1995 Christopher Seiwald. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2015 Artur Shepilko. + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+
+/*
+ * execvms.c - execute a shell script, ala VMS.
+ *
+ * The approach is this:
+ *
+ * If the command is a single line, and shorter than WRTLEN (what we believe to
+ * be the maximum line length), we just system() it.
+ *
+ * If the command is multi-line, or longer than WRTLEN, we write the command
+ * block to a temp file, splitting long lines (using "-" at the end of the line
+ * to indicate continuation), and then source that temp file. We use special
+ * logic to make sure we do not continue in the middle of a quoted string.
+ *
+ * 05/04/94 (seiwald) - async multiprocess interface; noop on VMS
+ * 12/20/96 (seiwald) - rewritten to handle multi-line commands well
+ * 01/14/96 (seiwald) - do not put -'s between "'s
+ * 01/19/15 (shepilko)- adapt for jam-3.1.19
+ */
+
+#include "jam.h"
+#include "lists.h"
+#include "execcmd.h"
+#include "output.h"
+
+#ifdef OS_VMS
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+
+#define WRTLEN 240
+
+#define MIN( a, b ) ((a) < (b) ? (a) : (b))
+
+#define CHAR_DQUOTE '"'
+
+#define VMS_PATH_MAX 1024
+#define VMS_COMMAND_MAX 1024
+
+#define VMS_WARNING 0
+#define VMS_SUCCESS 1
+#define VMS_ERROR 2
+#define VMS_FATAL 4
+
+char commandbuf[ VMS_COMMAND_MAX ] = { 0 };
+
+
+static int get_status(int vms_status);
+static clock_t get_cpu_time();
+
+/*
+ * exec_check() - preprocess and validate the command.
+ */
+
+int exec_check
+(
+    string const * command,
+    LIST * * pShell,
+    int * error_length,
+    int * error_max_length
+)
+{
+    int const is_raw_cmd = 1;
+
+    /* We allow empty commands for non-default shells since we do not really
+     * know what they are going to do with such commands.
+     */
+    if ( !command->size && ( is_raw_cmd || list_empty( *pShell ) ) )
+        return EXEC_CHECK_NOOP;
+
+    return is_raw_cmd
+        ? EXEC_CHECK_OK
+        : check_cmd_for_too_long_lines( command->value, MAXLINE, error_length,
+            error_max_length );
+}
+
+
+/*
+ * exec_cmd() - execute system command.
+ */
+
+void exec_cmd
+(
+    string const * command,
+    ExecCmdCallback func,
+    void * closure,
+    LIST * shell
+)
+{
+    char * s;
+    char * e;
+    char * p;
+    int vms_status;
+    int status;
+    int rstat = EXEC_CMD_OK;
+    int exit_reason = EXIT_OK;
+    timing_info time_info;
+    timestamp start_dt;
+    struct tms start_time;
+    struct tms end_time;
+    char * cmd_string = command->value;
+
+
+    /* Start the command */
+
+    timestamp_current( &time_info.start );
+    times( &start_time );
+
+    /* See if command is more than one line discounting leading/trailing white
+     * space.
+     */
+    for ( s = cmd_string; *s && isspace( *s ); ++s );
+
+    e = p = strchr( s, '\n' );
+
+    while ( p && isspace( *p ) )
+        ++p;
+
+    /* If multi-line or long, write to com file. Otherwise, exec directly. */
+    if ( ( p && *p ) || ( e - s > WRTLEN ) )
+    {
+        FILE * f;
+
+        /* Create temp file invocation. */
+
+        if ( !*commandbuf )
+        {
+            OBJECT * tmp_filename = 0;
+
+            tmp_filename = path_tmpfile();
+
+
+            /* Get tmp file name in VMS format. */
+            {
+                string os_filename[ 1 ];
+                string_new( os_filename );
+                path_translate_to_os( object_str( tmp_filename ), os_filename );
+                object_free( tmp_filename );
+                tmp_filename = object_new( os_filename->value );
+                string_free( os_filename );
+            }
+
+            commandbuf[0] = '@';
+            strncat( commandbuf + 1, object_str( tmp_filename ),
+                VMS_COMMAND_MAX - 2);
+        }
+
+
+        /* Open tempfile.
*/ + if ( !( f = fopen( commandbuf + 1, "w" ) ) ) + { + printf( "can't open cmd_string file\n" ); + rstat = EXEC_CMD_FAIL; + exit_reason = EXIT_FAIL; + + times( &end_time ); + + timestamp_current( &time_info.end ); + time_info.system = (double)( end_time.tms_cstime - + start_time.tms_cstime ) / 100.; + time_info.user = (double)( end_time.tms_cutime - + start_time.tms_cutime ) / 100.; + + (*func)( closure, rstat, &time_info, "" , "", exit_reason ); + return; + } + + + /* Running from TMP, so explicitly set default to CWD. */ + { + char * cwd = NULL; + int cwd_buf_size = VMS_PATH_MAX; + + while ( !(cwd = getcwd( NULL, cwd_buf_size ) ) /* alloc internally */ + && errno == ERANGE ) + { + cwd_buf_size += VMS_PATH_MAX; + } + + if ( !cwd ) + { + perror( "can not get current working directory" ); + exit( EXITBAD ); + } + + fprintf( f, "$ SET DEFAULT %s\n", cwd); + + free( cwd ); + } + + + /* For each line of the command. */ + while ( *cmd_string ) + { + char * s = strchr( cmd_string,'\n' ); + int len = s ? s + 1 - cmd_string : strlen( cmd_string ); + + fputc( '$', f ); + + /* For each chunk of a line that needs to be split. */ + while ( len > 0 ) + { + char * q = cmd_string; + char * qe = cmd_string + MIN( len, WRTLEN ); + char * qq = q; + int quote = 0; + + /* Look for matching "s -- expected in the same line. */ + for ( ; q < qe; ++q ) + if ( ( *q == CHAR_DQUOTE ) && ( quote = !quote ) ) + qq = q; + + /* When needs splitting and is inside an open quote, + * back up to opening quote and split off at it. + * When the quoted string spans over a chunk, + * pass string as a whole. + * If no matching quote found, dump the rest of command. + */ + if ( len > WRTLEN && quote ) + { + q = qq; + + if ( q == cmd_string ) + { + for ( q = qe; q < ( cmd_string + len ) + && *q != CHAR_DQUOTE ; ++q) {} + q = ( *q == CHAR_DQUOTE) ? ( q + 1 ) : ( cmd_string + len ); + } + } + + fwrite( cmd_string, ( q - cmd_string ), 1, f ); + + len -= ( q - cmd_string ); + cmd_string = q; + + if ( len ) + { + fputc( '-', f ); + fputc( '\n', f ); + } + } + } + + fclose( f ); + + if ( DEBUG_EXECCMD ) + { + FILE * f; + char buf[ WRTLEN + 1 ] = { 0 }; + + if ( (f = fopen( commandbuf + 1, "r" ) ) ) + { + int nbytes; + printf( "Command file: %s\n", commandbuf + 1 ); + + do + { + nbytes = fread( buf, sizeof( buf[0] ), sizeof( buf ) - 1, f ); + + if ( nbytes ) fwrite(buf, sizeof( buf[0] ), nbytes, stdout); + } + while ( !feof(f) ); + + fclose(f); + } + } + + /* Execute command file */ + vms_status = system( commandbuf ); + status = get_status( vms_status ); + + unlink( commandbuf + 1 ); + } + else + { + /* Execute single line command. Strip trailing newline before execing. + * TODO:Call via popen() with capture of the output may be better here. + */ + if ( e ) *e = 0; + + status = VMS_SUCCESS; /* success on empty command */ + if ( *s ) + { + vms_status = system( s ); + status = get_status( vms_status ); + } + } + + + times( &end_time ); + + timestamp_current( &time_info.end ); + time_info.system = (double)( end_time.tms_cstime - + start_time.tms_cstime ) / 100.; + time_info.user = (double)( end_time.tms_cutime - + start_time.tms_cutime ) / 100.; + + + /* Fail for error or fatal error. OK on OK, warning or info exit. */ + if ( ( status == VMS_ERROR ) || ( status == VMS_FATAL ) ) + { + rstat = EXEC_CMD_FAIL; + exit_reason = EXIT_FAIL; + } + + (*func)( closure, rstat, &time_info, "" , "", exit_reason ); +} + + +void exec_wait() +{ + return; +} + + +/* get_status() - returns status of the VMS command execution. 
+ - Map VMS status to its severity (lower 3-bits) + - W-DCL-IVVERB is returned on unrecognized command -- map to general ERROR +*/ +int get_status( int vms_status ) +{ +#define VMS_STATUS_DCL_IVVERB 0x00038090 + + int status; + + switch (vms_status) + { + case VMS_STATUS_DCL_IVVERB: + status = VMS_ERROR; + break; + + default: + status = vms_status & 0x07; /* $SEVERITY bits */ + } + + return status; +} + + +#define __NEW_STARLET 1 + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +/* + * get_cpu_time() - returns CPU time in CLOCKS_PER_SEC since process start. + * on error returns (clock_t)-1. + * + * Intended to emulate (system + user) result of *NIX times(), if CRTL times() + * is not available. +* However, this accounts only for the current process. To account for child +* processes, these need to be directly spawned/forked via exec(). +* Moreover, child processes should be running a C main program or a program +* that calls VAXC$CRTL_INIT or DECC$CRTL_INIT. +*/ + +clock_t get_cpu_time() +{ + clock_t result = (clock_t) 0; + + IOSB iosb; + int status; + long cputime = 0; + + + ILE3 jpi_items[] = { + { sizeof( cputime ), JPI$_CPUTIM, &cputime, NULL }, /* longword int, 10ms */ + { 0 }, + }; + + status = sys$getjpiw (EFN$C_ENF, 0, 0, jpi_items, &iosb, 0, 0); + + if ( !$VMS_STATUS_SUCCESS( status ) ) + { + lib$signal( status ); + + result = (clock_t) -1; + return result; + } + + + result = ( cputime / 100 ) * CLOCKS_PER_SEC; + + return result; +} + + +# endif /* VMS */ + diff --git a/src/engine/filent.c b/src/engine/filent.c index 00dcc49b3..4f19a56b5 100644 --- a/src/engine/filent.c +++ b/src/engine/filent.c @@ -24,6 +24,8 @@ * file_collect_dir_content_() - collects directory content information * file_dirscan_() - OS specific file_dirscan() implementation * file_query_() - query information about a path from the OS + * file_collect_archive_content_() - collects information about archive members + * file_archivescan_() - OS specific file_archivescan() implementation */ #include "jam.h" @@ -33,6 +35,7 @@ #include "object.h" #include "pathsys.h" #include "strings.h" +#include "output.h" #ifdef __BORLANDC__ # undef FILENAME /* cpp namespace collision */ @@ -243,7 +246,7 @@ int try_file_query_root( file_info_t * const info ) { return 0; } - + /* We have a root path */ if ( !GetFileAttributesExA( buf, GetFileExInfoStandard, &fileData ) ) { @@ -348,27 +351,83 @@ struct ar_hdr #define SARFMAG 2 #define SARHDR sizeof( struct ar_hdr ) -void file_archscan( char const * archive, scanback func, void * closure ) +void file_archscan( char const * arch, scanback func, void * closure ) +{ + OBJECT * path = object_new( arch ); + file_archive_info_t * archive = file_archive_query( path ); + + object_free( path ); + + if ( filelist_empty( archive->members ) ) + { + if ( file_collect_archive_content_( archive ) < 0 ) + return; + } + + /* Report the collected archive content. 
*/ + { + FILELISTITER iter = filelist_begin( archive->members ); + FILELISTITER const end = filelist_end( archive->members ); + char buf[ MAXJPATH ]; + + for ( ; iter != end ; iter = filelist_next( iter ) ) + { + file_info_t * member_file = filelist_item( iter ); + LIST * symbols = member_file->files; + + /* Construct member path: 'archive-path(member-name)' + */ + sprintf( buf, "%s(%s)", + object_str( archive->file->name ), + object_str( member_file->name ) ); + { + OBJECT * const member = object_new( buf ); + (*func)( closure, member, 1 /* time valid */, &member_file->time ); + object_free( member ); + } + } + } +} + + +/* + * file_archivescan_() - OS specific file_archivescan() implementation + */ + +void file_archivescan_( file_archive_info_t * const archive, archive_scanback func, + void * closure ) +{ +} + + +/* + * file_collect_archive_content_() - collects information about archive members + */ + +int file_collect_archive_content_( file_archive_info_t * const archive ) { struct ar_hdr ar_hdr; char * string_table = 0; char buf[ MAXJPATH ]; long offset; - int const fd = open( archive, O_RDONLY | O_BINARY, 0 ); + const char * path = object_str( archive->file->name ); + int const fd = open( path , O_RDONLY | O_BINARY, 0 ); + + if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members ); if ( fd < 0 ) - return; + return -1; if ( read( fd, buf, SARMAG ) != SARMAG || strncmp( ARMAG, buf, SARMAG ) ) { close( fd ); - return; + return -1; } offset = SARMAG; if ( DEBUG_BINDSCAN ) - printf( "scan archive %s\n", archive ); + out_printf( "scan archive %s\n", path ); while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) && !memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) ) @@ -391,7 +450,7 @@ void file_archscan( char const * archive, scanback func, void * closure ) */ string_table = BJAM_MALLOC_ATOMIC( lar_size + 1 ); if ( read( fd, string_table, lar_size ) != lar_size ) - printf( "error reading string table\n" ); + out_printf( "error reading string table\n" ); string_table[ lar_size ] = '\0'; offset += SARHDR + lar_size; continue; @@ -429,13 +488,23 @@ void file_archscan( char const * archive, scanback func, void * closure ) name = c + 1; } - sprintf( buf, "%s(%.*s)", archive, endname - name, name ); + sprintf( buf, "%.*s", endname - name, name ); + + if ( strcmp( buf, "") != 0 ) { - OBJECT * const member = object_new( buf ); - timestamp time; - timestamp_init( &time, (time_t)lar_date, 0 ); - (*func)( closure, member, 1 /* time valid */, &time ); - object_free( member ); + file_info_t * member = 0; + + /* NT static libraries appear to store the objects in a sequence + * reverse to the order in which they were inserted. + * Here we reverse the stored sequence by pushing members to front of + * member file list to get the intended members order. 
+             */
+            archive->members = filelist_push_front( archive->members, object_new( buf ) );
+            member = filelist_front( archive->members );
+            member->is_file = 1;
+            member->is_dir = 0;
+            member->exists = 0;
+            timestamp_init( &member->time, (time_t)lar_date, 0 );
         }
 
         offset += SARHDR + lar_size;
@@ -443,6 +512,8 @@
     }
 
     close( fd );
+
+    return 0;
 }
 
 #endif /* OS_NT */
diff --git a/src/engine/filesys.c b/src/engine/filesys.c
index dadaef82e..e9612bb2f 100644
--- a/src/engine/filesys.c
+++ b/src/engine/filesys.c
@@ -34,6 +34,7 @@
 #include "object.h"
 #include "pathsys.h"
 #include "strings.h"
+#include "output.h"
 
 #include
 #include
@@ -47,12 +48,92 @@ void file_dirscan_( file_info_t * const dir, scanback func, void * closure );
 int file_collect_dir_content_( file_info_t * const dir );
 void file_query_( file_info_t * const );
 
+void file_archivescan_( file_archive_info_t * const archive, archive_scanback func,
+    void * closure );
+int file_collect_archive_content_( file_archive_info_t * const archive );
+void file_archive_query_( file_archive_info_t * const );
+
+static void file_archivescan_impl( OBJECT * path, archive_scanback func,
+    void * closure );
 static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure );
+static void free_file_archive_info( void * xarchive, void * data );
 static void free_file_info( void * xfile, void * data );
+
 static void remove_files_atexit( void );
 
 static struct hash * filecache_hash;
+static struct hash * archivecache_hash;
+
+
+/*
+ * file_archive_info() - return cached information about an archive
+ *
+ * Returns a default-initialized structure containing only the queried file's
+ * info in case this is the first time this file system entity has been
+ * referenced.
+ */
+
+file_archive_info_t * file_archive_info( OBJECT * const path, int * found )
+{
+    OBJECT * const path_key = path_as_key( path );
+    file_archive_info_t * archive;
+
+    if ( !archivecache_hash )
+        archivecache_hash = hashinit( sizeof( file_archive_info_t ),
+            "file_archive_info" );
+
+    archive = (file_archive_info_t *)hash_insert( archivecache_hash, path_key,
+        found );
+
+    if ( !*found )
+    {
+        archive->file = 0;
+        archive->members = FL0;
+    }
+    else
+        object_free( path_key );
+
+    return archive;
+}
+
+
+/*
+ * file_archive_query() - get cached information about an archive file path
+ *
+ * Returns 0 in case querying the OS about the given path fails, e.g. because
+ * the path does not reference an existing file system object.
+ */ + +file_archive_info_t * file_archive_query( OBJECT * const path ) +{ + int found; + file_archive_info_t * const archive = file_archive_info( path, &found ); + file_info_t * file = file_query( path ); + + if ( !( file && file->is_file ) ) + { + return 0; + } + + archive->file = file; + + + return archive; +} + + + +/* + * file_archivescan() - scan an archive for members + */ + +void file_archivescan( OBJECT * path, archive_scanback func, void * closure ) +{ + PROFILE_ENTER( FILE_ARCHIVESCAN ); + file_archivescan_impl( path, func, closure ); + PROFILE_EXIT( FILE_ARCHIVESCAN ); +} /* @@ -63,14 +144,14 @@ void file_build1( PATHNAME * const f, string * file ) { if ( DEBUG_SEARCH ) { - printf( "build file: " ); + out_printf( "build file: " ); if ( f->f_root.len ) - printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr ); + out_printf( "root = '%.*s' ", f->f_root.len, f->f_root.ptr ); if ( f->f_dir.len ) - printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr ); + out_printf( "dir = '%.*s' ", f->f_dir.len, f->f_dir.ptr ); if ( f->f_base.len ) - printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr ); - printf( "\n" ); + out_printf( "base = '%.*s' ", f->f_base.len, f->f_base.ptr ); + out_printf( "\n" ); } /* Start with the grist. If the current grist is not surrounded by <>'s, add @@ -112,6 +193,12 @@ void file_done() hashenumerate( filecache_hash, free_file_info, (void *)0 ); hashdone( filecache_hash ); } + + if ( archivecache_hash ) + { + hashenumerate( archivecache_hash, free_file_archive_info, (void *)0 ); + hashdone( archivecache_hash ); + } } @@ -259,6 +346,55 @@ void file_remove_atexit( OBJECT * const path ) } +/* + * file_archivescan_impl() - no-profiling worker for file_archivescan() + */ + +static void file_archivescan_impl( OBJECT * path, archive_scanback func, void * closure ) +{ + file_archive_info_t * const archive = file_archive_query( path ); + if ( !archive || !archive->file->is_file ) + return; + + /* Lazy collect the archive content information. */ + if ( filelist_empty( archive->members ) ) + { + if ( DEBUG_BINDSCAN ) + printf( "scan archive %s\n", object_str( archive->file->name ) ); + if ( file_collect_archive_content_( archive ) < 0 ) + return; + } + + /* OS specific part of the file_archivescan operation. */ + file_archivescan_( archive, func, closure ); + + /* Report the collected archive content. 
*/ + { + FILELISTITER iter = filelist_begin( archive->members ); + FILELISTITER const end = filelist_end( archive->members ); + char buf[ MAXJPATH ]; + + for ( ; iter != end ; iter = filelist_next( iter ) ) + { + file_info_t * member_file = filelist_item( iter ); + LIST * symbols = member_file->files; + + /* Construct member path: 'archive-path(member-name)' + */ + sprintf( buf, "%s(%s)", + object_str( archive->file->name ), + object_str( member_file->name ) ); + + { + OBJECT * const member = object_new( buf ); + (*func)( closure, member, symbols, 1, &member_file->time ); + object_free( member ); + } + } + } +} + + /* * file_dirscan_impl() - no-profiling worker for file_dirscan() */ @@ -273,7 +409,7 @@ static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure ) if ( list_empty( d->files ) ) { if ( DEBUG_BINDSCAN ) - printf( "scan directory %s\n", object_str( d->name ) ); + out_printf( "scan directory %s\n", object_str( d->name ) ); if ( file_collect_dir_content_( d ) < 0 ) return; } @@ -307,6 +443,14 @@ static void file_dirscan_impl( OBJECT * dir, scanback func, void * closure ) } +static void free_file_archive_info( void * xarchive, void * data ) +{ + file_archive_info_t * const archive = (file_archive_info_t *)xarchive; + + if ( archive ) filelist_free( archive->members ); +} + + static void free_file_info( void * xfile, void * data ) { file_info_t * const file = (file_info_t *)xfile; @@ -324,3 +468,209 @@ static void remove_files_atexit( void ) list_free( files_to_remove ); files_to_remove = L0; } + + +/* + * FILELIST linked-list implementation + */ + +FILELIST * filelist_new( OBJECT * path ) +{ + FILELIST * list = (FILELIST *)BJAM_MALLOC( sizeof( FILELIST ) ); + + memset( list, 0, sizeof( *list ) ); + list->size = 0; + list->head = 0; + list->tail = 0; + + return filelist_push_back( list, path ); +} + +FILELIST * filelist_push_back( FILELIST * list, OBJECT * path ) +{ + FILEITEM * item; + file_info_t * file; + + /* Lazy initialization + */ + if ( filelist_empty( list ) ) + { + list = filelist_new( path ); + return list; + } + + + item = (FILEITEM *)BJAM_MALLOC( sizeof( FILEITEM ) ); + memset( item, 0, sizeof( *item ) ); + item->value = (file_info_t *)BJAM_MALLOC( sizeof( file_info_t ) ); + + file = item->value; + memset( file, 0, sizeof( *file ) ); + + file->name = path; + file->files = L0; + + if ( list->tail ) + { + list->tail->next = item; + } + else + { + list->head = item; + } + list->tail = item; + list->size++; + + return list; +} + +FILELIST * filelist_push_front( FILELIST * list, OBJECT * path ) +{ + FILEITEM * item; + file_info_t * file; + + /* Lazy initialization + */ + if ( filelist_empty( list ) ) + { + list = filelist_new( path ); + return list; + } + + + item = (FILEITEM *)BJAM_MALLOC( sizeof( FILEITEM ) ); + memset( item, 0, sizeof( *item ) ); + item->value = (file_info_t *)BJAM_MALLOC( sizeof( file_info_t ) ); + + file = item->value; + memset( file, 0, sizeof( *file ) ); + + file->name = path; + file->files = L0; + + if ( list->head ) + { + item->next = list->head; + } + else + { + list->tail = item; + } + list->head = item; + list->size++; + + return list; +} + + +FILELIST * filelist_pop_front( FILELIST * list ) +{ + FILEITEM * item; + + if ( filelist_empty( list ) ) return list; + + item = list->head; + + if ( item ) + { + if ( item->value ) free_file_info( item->value, 0 ); + + list->head = item->next; + list->size--; + if ( !list->size ) list->tail = list->head; + +#ifdef BJAM_NO_MEM_CACHE + BJAM_FREE( item ); +#endif + } + + return list; +} + +int 
filelist_length( FILELIST * list ) +{ + int result = 0; + if ( !filelist_empty( list ) ) result = list->size; + + return result; +} + +void filelist_free( FILELIST * list ) +{ + FILELISTITER iter; + + if ( filelist_empty( list ) ) return; + + while ( filelist_length( list ) ) filelist_pop_front( list ); + +#ifdef BJAM_NO_MEM_CACHE + BJAM_FREE( list ); +#endif +} + +int filelist_empty( FILELIST * list ) +{ + return ( list == FL0 ); +} + + +FILELISTITER filelist_begin( FILELIST * list ) +{ + if ( filelist_empty( list ) + || list->head == 0 ) return (FILELISTITER)0; + + return &list->head->value; +} + + +FILELISTITER filelist_end( FILELIST * list ) +{ + return (FILELISTITER)0; +} + + +FILELISTITER filelist_next( FILELISTITER iter ) +{ + if ( iter ) + { + /* Given FILEITEM.value is defined as first member of FILEITEM structure + * and FILELISTITER = &FILEITEM.value, + * FILEITEM = *(FILEITEM **)FILELISTITER + */ + FILEITEM * item = (FILEITEM *)iter; + iter = ( item->next ? &item->next->value : (FILELISTITER)0 ); + } + + return iter; +} + + +file_info_t * filelist_item( FILELISTITER it ) +{ + file_info_t * result = (file_info_t *)0; + + if ( it ) + { + result = (file_info_t *)*it; + } + + return result; +} + + +file_info_t * filelist_front( FILELIST * list ) +{ + if ( filelist_empty( list ) + || list->head == 0 ) return (file_info_t *)0; + + return list->head->value; +} + + +file_info_t * filelist_back( FILELIST * list ) +{ + if ( filelist_empty( list ) + || list->tail == 0 ) return (file_info_t *)0; + + return list->tail->value; +} diff --git a/src/engine/filesys.h b/src/engine/filesys.h index 74fa3958f..5b7f3929b 100644 --- a/src/engine/filesys.h +++ b/src/engine/filesys.h @@ -34,11 +34,38 @@ typedef struct file_info_t LIST * files; } file_info_t; +typedef struct file_item FILEITEM; +struct file_item +{ + file_info_t * value; /* expected to be equvalent with &FILEITEM */ + FILEITEM * next; +}; + +typedef struct file_list +{ + FILEITEM * head; + FILEITEM * tail; + int size; +} FILELIST; + +typedef file_info_t * * FILELISTITER; /* also &FILEITEM equivalent */ + + +typedef struct file_archive_info_t +{ + file_info_t * file; + FILELIST * members; +} file_archive_info_t; + + +typedef void (*archive_scanback)( void * closure, OBJECT * path, LIST * symbols, + int found, timestamp const * const ); typedef void (*scanback)( void * closure, OBJECT * path, int found, timestamp const * const ); void file_archscan( char const * arch, scanback func, void * closure ); +void file_archivescan( OBJECT * path, archive_scanback func, void * closure ); void file_build1( PATHNAME * const f, string * file ) ; void file_dirscan( OBJECT * dir, scanback func, void * closure ); file_info_t * file_info( OBJECT * const path, int * found ); @@ -49,6 +76,29 @@ void file_remove_atexit( OBJECT * const path ); void file_supported_fmt_resolution( timestamp * const ); int file_time( OBJECT * const path, timestamp * const ); + +/* Archive/library file support */ +file_archive_info_t * file_archive_info( OBJECT * const path, int * found ); +file_archive_info_t * file_archive_query( OBJECT * const path ); + +/* FILELIST linked-list */ +FILELIST * filelist_new( OBJECT * path ); +FILELIST * filelist_push_back( FILELIST * list, OBJECT * path ); +FILELIST * filelist_push_front( FILELIST * list, OBJECT * path ); +FILELIST * filelist_pop_front( FILELIST * list ); +int filelist_length( FILELIST * list ); +void filelist_free( FILELIST * list ); + +FILELISTITER filelist_begin( FILELIST * list ); +FILELISTITER filelist_end( FILELIST * list 
); +FILELISTITER filelist_next( FILELISTITER it ); +file_info_t * filelist_item( FILELISTITER it ); +file_info_t * filelist_front( FILELIST * list ); +file_info_t * filelist_back( FILELIST * list ); + +#define FL0 ((FILELIST *)0) + + /* Internal utility worker functions. */ void file_query_posix_( file_info_t * const ); diff --git a/src/engine/fileunix.c b/src/engine/fileunix.c index e8ea51507..766b0afdd 100644 --- a/src/engine/fileunix.c +++ b/src/engine/fileunix.c @@ -23,6 +23,8 @@ * file_collect_dir_content_() - collects directory content information * file_dirscan_() - OS specific file_dirscan() implementation * file_query_() - query information about a path from the OS + * file_collect_archive_content_() - collects information about archive members + * file_archivescan_() - OS specific file_archivescan() implementation */ #include "jam.h" @@ -32,6 +34,7 @@ #include "object.h" #include "pathsys.h" #include "strings.h" +#include "output.h" #include #include @@ -220,13 +223,65 @@ void file_supported_fmt_resolution( timestamp * const t ) /* * file_archscan() - scan an archive for files */ +void file_archscan( char const * arch, scanback func, void * closure ) +{ + OBJECT * path = object_new( arch ); + file_archive_info_t * archive = file_archive_query( path ); + + object_free( path ); + + if ( filelist_empty( archive->members ) ) + { + if ( file_collect_archive_content_( archive ) < 0 ) + return; + } + + /* Report the collected archive content. */ + { + FILELISTITER iter = filelist_begin( archive->members ); + FILELISTITER const end = filelist_end( archive->members ); + char buf[ MAXJPATH ]; + + for ( ; iter != end ; iter = filelist_next( iter ) ) + { + file_info_t * member_file = filelist_item( iter ); + LIST * symbols = member_file->files; + + /* Construct member path: 'archive-path(member-name)' + */ + sprintf( buf, "%s(%s)", + object_str( archive->file->name ), + object_str( member_file->name ) ); + { + OBJECT * const member = object_new( buf ); + (*func)( closure, member, 1 /* time valid */, &member_file->time ); + object_free( member ); + } + } + } +} + + +/* + * file_archivescan_() - OS specific file_archivescan() implementation + */ + +void file_archivescan_( file_archive_info_t * const archive, archive_scanback func, + void * closure ) +{ +} + + +/* + * file_collect_archive_content_() - collects information about archive members + */ #ifndef AIAMAG /* God-fearing UNIX */ #define SARFMAG 2 #define SARHDR sizeof( struct ar_hdr ) -void file_archscan( char const * archive, scanback func, void * closure ) +int file_collect_archive_content_( file_archive_info_t * const archive ) { #ifndef NO_AR struct ar_hdr ar_hdr; @@ -234,21 +289,24 @@ void file_archscan( char const * archive, scanback func, void * closure ) char buf[ MAXJPATH ]; long offset; int fd; + const char * path = object_str( archive->file->name ); - if ( ( fd = open( archive, O_RDONLY, 0 ) ) < 0 ) - return; + if ( ! 
filelist_empty( archive->members ) ) filelist_free( archive->members ); + + if ( ( fd = open( path, O_RDONLY, 0 ) ) < 0 ) + return -1; if ( read( fd, buf, SARMAG ) != SARMAG || strncmp( ARMAG, buf, SARMAG ) ) { close( fd ); - return; + return -1; } offset = SARMAG; if ( DEBUG_BINDSCAN ) - printf( "scan archive %s\n", archive ); + out_printf( "scan archive %s\n", path ); while ( ( read( fd, &ar_hdr, SARHDR ) == SARHDR ) && !( memcmp( ar_hdr.ar_fmag, ARFMAG, SARFMAG ) @@ -283,7 +341,7 @@ void file_archscan( char const * archive, scanback func, void * closure ) string_table = (char *)BJAM_MALLOC_ATOMIC( lar_size ); lseek( fd, offset + SARHDR, 0 ); if ( read( fd, string_table, lar_size ) != lar_size ) - printf("error reading string table\n"); + out_printf("error reading string table\n"); } else if ( string_table && ar_hdr.ar_name[ 1 ] != ' ' ) { @@ -305,16 +363,20 @@ void file_archscan( char const * archive, scanback func, void * closure ) *c = '\0'; if ( DEBUG_BINDSCAN ) - printf( "archive name %s found\n", lar_name ); + out_printf( "archive name %s found\n", lar_name ); - sprintf( buf, "%s(%s)", archive, lar_name ); + sprintf( buf, "%s", lar_name ); + if ( strcmp( buf, "") != 0 ) { - OBJECT * const member = object_new( buf ); - timestamp time; - timestamp_init( &time, (time_t)lar_date, 0 ); - (*func)( closure, member, 1 /* time valid */, &time ); - object_free( member ); + file_info_t * member = 0; + + archive->members = filelist_push_back( archive->members, object_new( buf ) ); + member = filelist_back( archive->members ); + member->is_file = 1; + member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)lar_date, 0 ); } offset += SARHDR + ( ( lar_size + 1 ) & ~1 ); @@ -326,12 +388,13 @@ void file_archscan( char const * archive, scanback func, void * closure ) close( fd ); #endif /* NO_AR */ + + return 0; } #else /* AIAMAG - RS6000 AIX */ -static void file_archscan_small( int fd, char const * archive, scanback func, - void * closure ) +static void collect_archive_content_small( int fd, file_archive_info_t * const archive ) { struct fl_hdr fl_hdr; @@ -342,6 +405,7 @@ static void file_archscan_small( int fd, char const * archive, scanback func, char buf[ MAXJPATH ]; long offset; + const char * path = object_str( archive->file->name ); if ( read( fd, (char *)&fl_hdr, FL_HSZ ) != FL_HSZ ) return; @@ -349,7 +413,7 @@ static void file_archscan_small( int fd, char const * archive, scanback func, sscanf( fl_hdr.fl_fstmoff, "%ld", &offset ); if ( DEBUG_BINDSCAN ) - printf( "scan archive %s\n", archive ); + out_printf( "scan archive %s\n", path ); while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 && read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= (int)sizeof( ar_hdr.hdr ) ) @@ -366,14 +430,18 @@ static void file_archscan_small( int fd, char const * archive, scanback func, ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0'; - sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name ); + sprintf( buf, "%s", ar_hdr.hdr._ar_name.ar_name ); + if ( strcmp( buf, "") != 0 ) { - OBJECT * const member = object_new( buf ); - timestamp time; - timestamp_init( &time, (time_t)lar_date, 0 ); - (*func)( closure, member, 1 /* time valid */, &time ); - object_free( member ); + file_info_t * member = 0; + + archive->members = filelist_push_back( archive->members, object_new( buf ) ); + member = filelist_back( archive->members ); + member->is_file = 1; + member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)lar_date, 0 ); } } } @@ -381,8 +449,7 @@ static void 
file_archscan_small( int fd, char const * archive, scanback func, /* Check for OS versions supporting the big variant. */ #ifdef AR_HSZ_BIG -static void file_archscan_big( int fd, char const * archive, scanback func, - void * closure ) +static void collect_archive_content_big( int fd, file_archive_info_t * const archive ) { struct fl_hdr_big fl_hdr; @@ -393,6 +460,7 @@ static void file_archscan_big( int fd, char const * archive, scanback func, char buf[ MAXJPATH ]; long long offset; + const char * path = object_str( archive->file->name ); if ( read( fd, (char *)&fl_hdr, FL_HSZ_BIG ) != FL_HSZ_BIG ) return; @@ -400,7 +468,7 @@ static void file_archscan_big( int fd, char const * archive, scanback func, sscanf( fl_hdr.fl_fstmoff, "%lld", &offset ); if ( DEBUG_BINDSCAN ) - printf( "scan archive %s\n", archive ); + out_printf( "scan archive %s\n", path ); while ( offset > 0 && lseek( fd, offset, 0 ) >= 0 && read( fd, &ar_hdr, sizeof( ar_hdr ) ) >= sizeof( ar_hdr.hdr ) ) @@ -417,49 +485,58 @@ static void file_archscan_big( int fd, char const * archive, scanback func, ar_hdr.hdr._ar_name.ar_name[ lar_namlen ] = '\0'; - sprintf( buf, "%s(%s)", archive, ar_hdr.hdr._ar_name.ar_name ); + sprintf( buf, "%s", ar_hdr.hdr._ar_name.ar_name ); + if ( strcmp( buf, "") != 0 ) { - OBJECT * const member = object_new( buf ); - timestamp time; - timestamp_init( &time, (time_t)lar_date, 0 ); - (*func)( closure, member, 1 /* time valid */, &time ); - object_free( member ); + file_info_t * member = 0; + + archive->members = filelist_push_back( archive->members, object_new( buf ) ); + member = filelist_back( archive->members ); + member->is_file = 1; + member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)lar_date, 0 ); } } } #endif /* AR_HSZ_BIG */ -void file_archscan( char const * archive, scanback func, void * closure ) +int file_collect_archive_content_( file_archive_info_t * const archive ) { int fd; char fl_magic[ SAIAMAG ]; + const char * path = object_str( archive->file->name ); - if ( ( fd = open( archive, O_RDONLY, 0 ) ) < 0 ) - return; + if ( ! filelist_empty( archive->members ) ) filelist_free( archive->members ); + + if ( ( fd = open( path, O_RDONLY, 0 ) ) < 0 ) + return -1; if ( read( fd, fl_magic, SAIAMAG ) != SAIAMAG || lseek( fd, 0, SEEK_SET ) == -1 ) { close( fd ); - return; + return -1; } if ( !strncmp( AIAMAG, fl_magic, SAIAMAG ) ) { /* read small variant */ - file_archscan_small( fd, archive, func, closure ); + collect_archive_content_small( fd, archive ); } #ifdef AR_HSZ_BIG else if ( !strncmp( AIAMAGBIG, fl_magic, SAIAMAG ) ) { /* read big variant */ - file_archscan_big( fd, archive, func, closure ); + collect_archive_content_big( fd, archive ); } #endif close( fd ); + + return 0; } #endif /* AIAMAG - RS6000 AIX */ diff --git a/src/engine/filevms.c b/src/engine/filevms.c new file mode 100644 index 000000000..e94e2634c --- /dev/null +++ b/src/engine/filevms.c @@ -0,0 +1,461 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Copyright 2015 Artur Shepilko. + * Distributed under the Boost Software License, Version 1.0. 
+ * (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
+ */
+
+
+#include "jam.h"
+#include "filesys.h"
+
+#include "object.h"
+#include "pathsys.h"
+#include "strings.h"
+
+
+#ifdef OS_VMS
+
+/*
+ * filevms.c - manipulate file names and scan directories on VMS.
+ *
+ * This implementation is based on POSIX-style path manipulation.
+ *
+ * The VMS CRTL directly supports both POSIX- and native VMS-style path
+ * expressions, with the POSIX-to-VMS path translation performed internally by
+ * the same set of functions. For the most part such processing is transparent,
+ * with few differences mainly related to file versions (in POSIX mode only the
+ * most recent version is visible).
+ *
+ * This should allow us to re-use the fileunix.c implementation,
+ * excluding archive/library member processing.
+ *
+ * Thus in jam-files the path references can also remain POSIX/UNIX-style at all
+ * levels EXCEPT in actions scope, where they must be translated to the native
+ * VMS style. This approach is somewhat similar to jam CYGWIN handling.
+ *
+ *
+ * External routines:
+ *  file_archscan() - scan an archive for files
+ *  file_mkdir() - create a directory
+ *  file_supported_fmt_resolution() - file modification timestamp resolution
+ *
+ * External routines called only via routines in filesys.c:
+ *  file_collect_dir_content_() - collects directory content information
+ *  file_dirscan_() - OS specific file_dirscan() implementation
+ *  file_query_() - query information about a path from the OS
+ *  file_collect_archive_content_() - collects information about archive members
+ *  file_archivescan_() - OS specific file_archivescan() implementation
+ */
+
+#include
+#include
+
+#include /* needed for mkdir() */
+#include /* needed for read and close prototype */
+
+#include
+#define STRUCT_DIRENT struct dirent
+
+
+void path_translate_to_os_( char const * f, string * file );
+
+/*
+ * file_collect_dir_content_() - collects directory content information
+ */
+
+int file_collect_dir_content_( file_info_t * const d )
+{
+    LIST * files = L0;
+    PATHNAME f;
+    DIR * dd;
+    STRUCT_DIRENT * dirent;
+    string path[ 1 ];
+    char const * dirstr;
+
+    assert( d );
+    assert( d->is_dir );
+    assert( list_empty( d->files ) );
+
+    dirstr = object_str( d->name );
+
+    memset( (char *)&f, '\0', sizeof( f ) );
+    f.f_dir.ptr = dirstr;
+    f.f_dir.len = strlen( dirstr );
+
+    if ( !*dirstr ) dirstr = ".";
+
+    if ( !( dd = opendir( dirstr ) ) )
+        return -1;
+
+    string_new( path );
+    while ( ( dirent = readdir( dd ) ) )
+    {
+        OBJECT * name;
+        f.f_base.ptr = dirent->d_name
+        #ifdef old_sinix
+            - 2 /* Broken structure definition on sinix. */
+        #endif
+            ;
+        f.f_base.len = strlen( f.f_base.ptr );
+
+        string_truncate( path, 0 );
+        path_build( &f, path );
+        name = object_new( path->value );
+        /* Immediately stat the file to preserve invariants.
*/ + if ( file_query( name ) ) + files = list_push_back( files, name ); + else + object_free( name ); + } + string_free( path ); + + closedir( dd ); + + d->files = files; + return 0; +} + + +/* + * file_dirscan_() - OS specific file_dirscan() implementation + */ + +void file_dirscan_( file_info_t * const d, scanback func, void * closure ) +{ + assert( d ); + assert( d->is_dir ); + + /* Special case / : enter it */ + if ( !strcmp( object_str( d->name ), "/" ) ) + (*func)( closure, d->name, 1 /* stat()'ed */, &d->time ); +} + + +/* + * file_mkdir() - create a directory + */ + +int file_mkdir( char const * const path ) +{ + /* Explicit cast to remove const modifiers and avoid related compiler + * warnings displayed when using the intel compiler. + */ + return mkdir( (char *)path, 0777 ); +} + + +/* + * file_query_() - query information about a path from the OS + */ + +void file_query_( file_info_t * const info ) +{ + file_query_posix_( info ); +} + + +/* + * file_supported_fmt_resolution() - file modification timestamp resolution + * + * Returns the minimum file modification timestamp resolution supported by this + * Boost Jam implementation. File modification timestamp changes of less than + * the returned value might not be recognized. + * + * Does not take into consideration any OS or file system related restrictions. + * + * Return value 0 indicates that any value supported by the OS is also supported + * here. + */ + +void file_supported_fmt_resolution( timestamp * const t ) +{ + /* The current implementation does not support file modification timestamp + * resolution of less than one second. + */ + timestamp_init( t, 1, 0 ); +} + +/*------------------------------------------------------------------------------ +* VMS-specific processing: +* +*/ + +#include +#include +#include +#include +#include +#include +#include + +/* Supply missing prototypes for lbr$-routines*/ + +#ifdef __cplusplus +extern "C" { +#endif /* __cplusplus */ + +int lbr$set_module( + void **, + unsigned long *, + struct dsc$descriptor_s *, + unsigned short *, + void * ); + +int lbr$open( void **, + struct dsc$descriptor_s *, + void *, + void *, + void *, + void *, + void * ); + +int lbr$ini_control( + void **, + unsigned long *, + unsigned long *, + void * ); + +int lbr$get_index( + void **, + unsigned long * const, + int (*func)( struct dsc$descriptor_s *, unsigned long *), + void * ); + +int lbr$search( + void **, + unsigned long * const, + unsigned short *, + int (*func)( struct dsc$descriptor_s *, unsigned long *), + unsigned long *); + +int lbr$close( + void ** ); + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + + + +static void +file_cvttime( + unsigned int *curtime, + time_t *unixtime ) +{ + static const size_t divisor = 10000000; + static unsigned int bastim[2] = { 0x4BEB4000, 0x007C9567 }; /* 1/1/1970 */ + int delta[2], remainder; + + lib$subx( curtime, bastim, delta ); + lib$ediv( &divisor, delta, unixtime, &remainder ); +} + + +static void downcase_inplace( char * p ) +{ + for ( ; *p; ++p ) + *p = tolower( *p ); +} + + +static file_archive_info_t * m_archive = NULL; +static file_info_t * m_member_found = NULL; +static void * m_lbr_context = NULL; +static unsigned short * m_rfa_found = NULL; +static const unsigned long LBR_MODINDEX_NUM = 1, + LBR_SYMINDEX_NUM = 2; /* GST:global symbol table */ + + +static unsigned int set_archive_symbol( struct dsc$descriptor_s *symbol, + unsigned long *rfa ) +{ + file_info_t * member = m_member_found; + char buf[ MAXJPATH ] = { 0 }; + + strncpy(buf, 
symbol->dsc$a_pointer, symbol->dsc$w_length); + buf[ symbol->dsc$w_length ] = 0; + + member->files = list_push_back( member->files, object_new( buf ) ); + + return ( 1 ); /* continue */ +} + + +static unsigned int set_archive_member( struct dsc$descriptor_s *module, + unsigned long *rfa ) +{ + file_archive_info_t * archive = m_archive; + + static struct dsc$descriptor_s bufdsc = + {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL}; + + struct mhddef *mhd; + char filename[128] = { 0 }; + char buf[ MAXJPATH ] = { 0 }; + + int status; + time_t library_date; + + register int i; + register char *p; + + bufdsc.dsc$a_pointer = filename; + bufdsc.dsc$w_length = sizeof( filename ); + status = lbr$set_module( &m_lbr_context, rfa, &bufdsc, + &bufdsc.dsc$w_length, NULL ); + + if ( !(status & 1) ) + return ( 1 ); /* continue */ + + mhd = (struct mhddef *)filename; + + file_cvttime( &mhd->mhd$l_datim, &library_date ); + + /* strncpy( filename, module->dsc$a_pointer, module->dsc$w_length ); + */ + for ( i = 0, p = module->dsc$a_pointer; i < module->dsc$w_length; ++i, ++p ) + filename[ i ] = *p; + + filename[ i ] = '\0'; + + if ( strcmp( filename, "" ) != 0 ) + { + file_info_t * member = 0; + + /* Construct member's filename as lowercase "module.obj" */ + sprintf( buf, "%s.obj", filename ); + downcase_inplace( buf ); + archive->members = filelist_push_back( archive->members, object_new( buf ) ); + + member = filelist_back( archive->members ); + member->is_file = 1; + member->is_dir = 0; + member->exists = 0; + timestamp_init( &member->time, (time_t)library_date, 0 ); + + m_member_found = member; + m_rfa_found = rfa; + status = lbr$search(&m_lbr_context, &LBR_SYMINDEX_NUM, m_rfa_found, set_archive_symbol, NULL); + } + + return ( 1 ); /* continue */ +} + + + +void file_archscan( char const * arch, scanback func, void * closure ) +{ + OBJECT * path = object_new( arch ); + file_archive_info_t * archive = file_archive_query( path ); + + object_free( path ); + + if ( filelist_empty( archive->members ) ) + { + if ( DEBUG_BINDSCAN ) + out_printf( "scan archive %s\n", object_str( archive->file->name ) ); + + if ( file_collect_archive_content_( archive ) < 0 ) + return; + } + + /* Report the collected archive content. */ + { + FILELISTITER iter = filelist_begin( archive->members ); + FILELISTITER const end = filelist_end( archive->members ); + char buf[ MAXJPATH ]; + + for ( ; iter != end ; iter = filelist_next( iter ) ) + { + file_info_t * member_file = filelist_item( iter ); + LIST * symbols = member_file->files; + + /* Construct member path: 'archive-path(member-name)' + */ + sprintf( buf, "%s(%s)", + object_str( archive->file->name ), + object_str( member_file->name ) ); + { + OBJECT * const member = object_new( buf ); + (*func)( closure, member, 1 /* time valid */, &member_file->time ); + object_free( member ); + } + } + } +} + + +/* + * file_archivescan_() - OS specific file_archivescan() implementation + */ +void file_archivescan_( file_archive_info_t * const archive, archive_scanback func, + void * closure ) +{ +} + + +/* + * file_collect_archive_content_() - collects information about archive members + */ + +int file_collect_archive_content_( file_archive_info_t * const archive ) +{ + unsigned short rfa[3]; + + static struct dsc$descriptor_s library = + {0, DSC$K_DTYPE_T, DSC$K_CLASS_S, NULL}; + + unsigned long lfunc = LBR$C_READ; + unsigned long typ = LBR$C_TYP_UNK; + + register int status; + string buf[ 1 ]; + char vmspath[ MAXJPATH ] = { 0 }; + + m_archive = archive; + + if ( ! 
filelist_empty( archive->members ) ) filelist_free( archive->members ); + + /* Translate path to VMS + */ + string_new( buf ); + path_translate_to_os_( object_str( archive->file->name ), buf ); + strcpy( vmspath, buf->value ); + string_free( buf ); + + + status = lbr$ini_control( &m_lbr_context, &lfunc, &typ, NULL ); + if ( !( status & 1 ) ) + return -1; + + library.dsc$a_pointer = vmspath; + library.dsc$w_length = strlen( vmspath ); + + status = lbr$open( &m_lbr_context, &library, NULL, NULL, NULL, NULL, NULL ); + if ( !( status & 1 ) ) + return -1; + + /* Scan main index for modules. + * For each module search symbol-index to collect module's symbols. + */ + status = lbr$get_index( &m_lbr_context, &LBR_MODINDEX_NUM, set_archive_member, NULL ); + + if ( !( status & 1 ) ) + return -1; + + + (void) lbr$close( &m_lbr_context ); + + return 0; +} + +#endif /* OS_VMS */ + diff --git a/src/engine/function.c b/src/engine/function.c index d3b9849f5..53fe1fe69 100644 --- a/src/engine/function.c +++ b/src/engine/function.c @@ -20,21 +20,13 @@ #include "rules.h" #include "search.h" #include "variable.h" +#include "output.h" #include #include #include #include -#ifdef OS_CYGWIN -# include -# include -# ifdef CYGWIN_VERSION_CYGWIN_CONV -# include -# endif -# include -#endif - int glob( char const * s, char const * c ); void backtrace( FRAME * ); void backtrace_line( FRAME * ); @@ -121,6 +113,7 @@ void backtrace_line( FRAME * ); #define INSTR_OUTPUT_STRINGS 55 #define INSTR_DEBUG_LINE 67 +#define INSTR_FOR_POP 70 typedef struct instruction { @@ -452,7 +445,7 @@ static LIST * function_call_rule( JAM_FUNCTION * function, FRAME * frame, if ( list_empty( first ) ) { backtrace_line( frame ); - printf( "warning: rulename %s expands to empty string\n", unexpanded ); + out_printf( "warning: rulename %s expands to empty string\n", unexpanded ); backtrace( frame ); list_free( first ); for ( i = 0; i < n_args; ++i ) @@ -504,11 +497,11 @@ static LIST * function_call_member_rule( JAM_FUNCTION * function, FRAME * frame, frame->file = file; frame->line = line; - + if ( list_empty( first ) ) { backtrace_line( frame ); - printf( "warning: object is empty\n" ); + out_printf( "warning: object is empty\n" ); backtrace( frame ); list_free( first ); @@ -754,74 +747,30 @@ static void var_edit_file( char const * in, string * out, VAR_EDITS * edits ) string_append( out, in ); } + /* - * var_edit_cyg2win() - conversion of a cygwin to a Windows path. - * - * FIXME: skip grist + * var_edit_translate_path() - translate path to os native format. */ -#ifdef OS_CYGWIN -static void var_edit_cyg2win( string * out, size_t pos, VAR_EDITS * edits ) +static void var_edit_translate_path( string * out, size_t pos, VAR_EDITS * edits ) { if ( edits->to_windows ) { - #ifdef CYGWIN_VERSION_CYGWIN_CONV - /* Use new Cygwin API added with Cygwin 1.7. Old one had no error - * handling and has been deprecated. 
- */ - char * dynamicBuffer = 0; - char buffer[ MAX_PATH + 1001 ]; - char const * result = buffer; - cygwin_conv_path_t const conv_type = CCP_POSIX_TO_WIN_A | CCP_RELATIVE; - ssize_t const apiResult = cygwin_conv_path( conv_type, out->value + pos, - buffer, sizeof( buffer ) / sizeof( *buffer ) ); - assert( apiResult == 0 || apiResult == -1 ); - assert( apiResult || strlen( result ) < sizeof( buffer ) / sizeof( - *buffer ) ); - if ( apiResult ) - { - result = 0; - if ( errno == ENOSPC ) - { - ssize_t const size = cygwin_conv_path( conv_type, out->value + - pos, NULL, 0 ); - assert( size >= -1 ); - if ( size > 0 ) - { - dynamicBuffer = (char *)BJAM_MALLOC_ATOMIC( size ); - if ( dynamicBuffer ) - { - ssize_t const apiResult = cygwin_conv_path( conv_type, - out->value + pos, dynamicBuffer, size ); - assert( apiResult == 0 || apiResult == -1 ); - if ( !apiResult ) - { - result = dynamicBuffer; - assert( strlen( result ) < size ); - } - } - } - } - } - #else /* CYGWIN_VERSION_CYGWIN_CONV */ - /* Use old Cygwin API deprecated with Cygwin 1.7. */ - char result[ MAX_PATH + 1 ]; - cygwin_conv_to_win32_path( out->value + pos, result ); - assert( strlen( result ) <= MAX_PATH ); - #endif /* CYGWIN_VERSION_CYGWIN_CONV */ - if ( result ) + string result[ 1 ]; + int translated; + + /* Translate path to os native format. */ + translated = path_translate_to_os( out->value + pos, result ); + if ( translated ) { string_truncate( out, pos ); - string_append( out, result ); + string_append( out, result->value ); edits->to_slashes = 0; } - #ifdef CYGWIN_VERSION_CYGWIN_CONV - if ( dynamicBuffer ) - BJAM_FREE( dynamicBuffer ); - #endif + + string_free( result ); } } -#endif /* OS_CYGWIN */ /* @@ -830,8 +779,8 @@ static void var_edit_cyg2win( string * out, size_t pos, VAR_EDITS * edits ) static void var_edit_shift( string * out, size_t pos, VAR_EDITS * edits ) { -#ifdef OS_CYGWIN - var_edit_cyg2win( out, pos, edits ); +#if defined( OS_CYGWIN ) || defined( OS_VMS ) + var_edit_translate_path( out, pos, edits ); #endif if ( edits->upshift || edits->downshift || edits->to_slashes ) @@ -1316,7 +1265,7 @@ static void dynamic_array_push_impl( struct dynamic_array * const array, #define dynamic_array_push( array, value ) (dynamic_array_push_impl(array, &value, sizeof(value))) #define dynamic_array_at( type, array, idx ) (((type *)(array)->data)[idx]) - +#define dynamic_array_pop( array ) (--(array)->size) /* * struct compiler @@ -1328,6 +1277,16 @@ struct label_info struct dynamic_array uses[ 1 ]; }; +#define LOOP_INFO_BREAK 0 +#define LOOP_INFO_CONTINUE 1 + +struct loop_info +{ + int type; + int label; + int cleanup_depth; +}; + struct stored_rule { OBJECT * name; @@ -1344,6 +1303,8 @@ typedef struct compiler struct dynamic_array labels[ 1 ]; struct dynamic_array rules[ 1 ]; struct dynamic_array actions[ 1 ]; + struct dynamic_array cleanups[ 1 ]; + struct dynamic_array loop_scopes[ 1 ]; } compiler; static void compiler_init( compiler * c ) @@ -1353,6 +1314,8 @@ static void compiler_init( compiler * c ) dynamic_array_init( c->labels ); dynamic_array_init( c->rules ); dynamic_array_init( c->actions ); + dynamic_array_init( c->cleanups ); + dynamic_array_init( c->loop_scopes ); } static void compiler_free( compiler * c ) @@ -1366,6 +1329,8 @@ static void compiler_free( compiler * c ) dynamic_array_free( c->labels ); dynamic_array_free( c->constants ); dynamic_array_free( c->code ); + dynamic_array_free( c->cleanups ); + dynamic_array_free( c->loop_scopes ); } static void compile_emit_instruction( compiler * c, instruction 
instr ) @@ -1431,6 +1396,82 @@ static int compile_emit_constant( compiler * c, OBJECT * value ) return c->constants->size - 1; } +static void compile_push_cleanup( compiler * c, unsigned int op_code, int arg ) +{ + instruction instr; + instr.op_code = op_code; + instr.arg = arg; + dynamic_array_push( c->cleanups, instr ); +} + +static void compile_pop_cleanup( compiler * c ) +{ + dynamic_array_pop( c->cleanups ); +} + +static void compile_emit_cleanups( compiler * c, int end ) +{ + int i; + for ( i = c->cleanups->size; --i >= end; ) + { + compile_emit_instruction( c, dynamic_array_at( instruction, c->cleanups, i ) ); + } +} + +static void compile_emit_loop_jump( compiler * c, int type ) +{ + struct loop_info * info = NULL; + int i; + for ( i = c->loop_scopes->size; --i >= 0; ) + { + struct loop_info * elem = &dynamic_array_at( struct loop_info, c->loop_scopes, i ); + if ( elem->type == type ) + { + info = elem; + break; + } + } + if ( info == NULL ) + { + printf( "warning: ignoring break statement used outside of loop\n" ); + return; + } + compile_emit_cleanups( c, info->cleanup_depth ); + compile_emit_branch( c, INSTR_JUMP, info->label ); +} + +static void compile_push_break_scope( compiler * c, int label ) +{ + struct loop_info info; + info.type = LOOP_INFO_BREAK; + info.label = label; + info.cleanup_depth = c->cleanups->size; + dynamic_array_push( c->loop_scopes, info ); +} + +static void compile_push_continue_scope( compiler * c, int label ) +{ + struct loop_info info; + info.type = LOOP_INFO_CONTINUE; + info.label = label; + info.cleanup_depth = c->cleanups->size; + dynamic_array_push( c->loop_scopes, info ); +} + +static void compile_pop_break_scope( compiler * c ) +{ + assert( c->loop_scopes->size > 0 ); + assert( dynamic_array_at( struct loop_info, c->loop_scopes, c->loop_scopes->size - 1 ).type == LOOP_INFO_BREAK ); + dynamic_array_pop( c->loop_scopes ); +} + +static void compile_pop_continue_scope( compiler * c ) +{ + assert( c->loop_scopes->size > 0 ); + assert( dynamic_array_at( struct loop_info, c->loop_scopes, c->loop_scopes->size - 1 ).type == LOOP_INFO_CONTINUE ); + dynamic_array_pop( c->loop_scopes ); +} + static int compile_emit_rule( compiler * c, OBJECT * name, PARSE * parse, int num_arguments, struct arg_list * arguments, int local ) { @@ -1994,7 +2035,7 @@ static int current_line; static void parse_error( char const * message ) { - printf( "%s:%d: %s\n", current_file, current_line, message ); + out_printf( "%s:%d: %s\n", current_file, current_line, message ); } @@ -2403,7 +2444,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) int f = compile_new_label( c ); int end = compile_new_label( c ); - printf( "%s:%d: Conditional used as list (check operator " + out_printf( "%s:%d: Conditional used as list (check operator " "precedence).\n", object_str( parse->file ), parse->line ); /* Emit the condition */ @@ -2422,6 +2463,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) int var = compile_emit_constant( c, parse->string ); int top = compile_new_label( c ); int end = compile_new_label( c ); + int continue_ = compile_new_label( c ); /* * Evaluate the list. 
@@ -2434,6 +2476,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) compile_emit( c, INSTR_PUSH_EMPTY, 0 ); compile_emit( c, INSTR_PUSH_LOCAL, var ); compile_emit( c, INSTR_SWAP, 1 ); + compile_push_cleanup( c, INSTR_POP_LOCAL, var ); } compile_emit( c, INSTR_FOR_INIT, 0 ); @@ -2441,14 +2484,26 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) compile_emit_branch( c, INSTR_FOR_LOOP, end ); compile_emit( c, INSTR_SET, var ); + compile_push_break_scope( c, end ); + compile_push_cleanup( c, INSTR_FOR_POP, 0 ); + compile_push_continue_scope( c, continue_ ); + /* Run the loop body */ compile_parse( parse->right, c, RESULT_NONE ); + compile_pop_continue_scope( c ); + compile_pop_cleanup( c ); + compile_pop_break_scope( c ); + + compile_set_label( c, continue_ ); compile_emit_branch( c, INSTR_JUMP, top ); compile_set_label( c, end ); if ( parse->num ) + { + compile_pop_cleanup( c ); compile_emit( c, INSTR_POP_LOCAL, var ); + } adjust_result( c, RESULT_NONE, result_location); } @@ -2479,6 +2534,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) : RESULT_RETURN; int test = compile_new_label( c ); int top = compile_new_label( c ); + int end = compile_new_label( c ); /* Make sure that we return an empty list if the loop runs zero times. */ adjust_result( c, RESULT_NONE, nested_result ); @@ -2486,10 +2542,15 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) compile_emit_branch( c, INSTR_JUMP, test ); compile_set_label( c, top ); /* Emit the loop body. */ + compile_push_break_scope( c, end ); + compile_push_continue_scope( c, test ); compile_parse( parse->right, c, nested_result ); + compile_pop_continue_scope( c ); + compile_pop_break_scope( c ); /* Emit the condition. 
*/ compile_set_label( c, test ); compile_condition( parse->left, c, 1, top ); + compile_set_label( c, end ); adjust_result( c, nested_result, result_location ); } @@ -2507,7 +2568,9 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) : RESULT_RETURN; compile_parse( parse->left, c, RESULT_STACK ); compile_emit( c, INSTR_PUSH_MODULE, 0 ); + compile_push_cleanup( c, INSTR_POP_MODULE, 0 ); compile_parse( parse->right, c, nested_result ); + compile_pop_cleanup( c ); compile_emit( c, INSTR_POP_MODULE, 0 ); adjust_result( c, nested_result, result_location ); } @@ -2521,8 +2584,10 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) else compile_emit( c, INSTR_PUSH_EMPTY, 0 ); compile_emit( c, INSTR_CLASS, 0 ); + compile_push_cleanup( c, INSTR_POP_MODULE, 0 ); compile_parse( parse->right, c, RESULT_NONE ); compile_emit( c, INSTR_BIND_MODULE_VARIABLES, 0 ); + compile_pop_cleanup( c ); compile_emit( c, INSTR_POP_MODULE, 0 ); adjust_result( c, RESULT_NONE, result_location ); @@ -2572,7 +2637,9 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) var_parse_group_free( group ); compile_parse( parse->right, c, RESULT_STACK ); compile_emit( c, INSTR_PUSH_LOCAL, name ); + compile_push_cleanup( c, INSTR_POP_LOCAL, name ); compile_parse( parse->third, c, nested_result ); + compile_pop_cleanup( c ); compile_emit( c, INSTR_POP_LOCAL, name ); } else @@ -2581,7 +2648,9 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) var_parse_group_free( group ); compile_parse( parse->right, c, RESULT_STACK ); compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 ); + compile_push_cleanup( c, INSTR_POP_LOCAL_GROUP, 0 ); compile_parse( parse->third, c, nested_result ); + compile_pop_cleanup( c ); compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 ); } } @@ -2590,7 +2659,9 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) compile_parse( parse->left, c, RESULT_STACK ); compile_parse( parse->right, c, RESULT_STACK ); compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 ); + compile_push_cleanup( c, INSTR_POP_LOCAL_GROUP, 0 ); compile_parse( parse->third, c, nested_result ); + compile_pop_cleanup( c ); compile_emit( c, INSTR_POP_LOCAL_GROUP, 0 ); } adjust_result( c, nested_result, result_location ); @@ -2638,7 +2709,9 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) int end = compile_new_label( c ); compile_parse( parse->left, c, RESULT_STACK ); compile_emit_branch( c, INSTR_PUSH_ON, end ); + compile_push_cleanup( c, INSTR_POP_ON, 0 ); var_parse_group_compile( group, c ); + compile_pop_cleanup( c ); compile_emit( c, INSTR_POP_ON, 0 ); compile_set_label( c, end ); } @@ -2649,7 +2722,9 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) int end = compile_new_label( c ); compile_parse( parse->left, c, RESULT_STACK ); compile_emit_branch( c, INSTR_PUSH_ON, end ); + compile_push_cleanup( c, INSTR_POP_ON, 0 ); compile_parse( parse->right, c, RESULT_STACK ); + compile_pop_cleanup( c ); compile_emit( c, INSTR_POP_ON, 0 ); compile_set_label( c, end ); } @@ -2815,6 +2890,20 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location ) adjust_result( c, RESULT_NONE, result_location ); compile_set_label( c, switch_end ); } + else if ( parse->type == PARSE_RETURN ) + { + compile_parse( parse->left, c, RESULT_RETURN ); + compile_emit_cleanups( c, 0 ); + compile_emit( c, INSTR_RETURN, 0 ); + } + else if ( parse->type == PARSE_BREAK ) + { + 
compile_emit_loop_jump( c, LOOP_INFO_BREAK ); + } + else if ( parse->type == PARSE_CONTINUE ) + { + compile_emit_loop_jump( c, LOOP_INFO_CONTINUE ); + } else if ( parse->type == PARSE_NULL ) adjust_result( c, RESULT_NONE, result_location ); else @@ -2931,17 +3020,17 @@ static void argument_error( char const * message, FUNCTION * procedure, extern void print_source_line( FRAME * ); LOL * actual = frame->args; backtrace_line( frame->prev ); - printf( "*** argument error\n* rule %s ( ", frame->rulename ); + out_printf( "*** argument error\n* rule %s ( ", frame->rulename ); argument_list_print( procedure->formal_arguments, procedure->num_formal_arguments ); - printf( " )\n* called with: ( " ); + out_printf( " )\n* called with: ( " ); lol_print( actual ); - printf( " )\n* %s %s\n", message, arg ? object_str ( arg ) : "" ); + out_printf( " )\n* %s %s\n", message, arg ? object_str ( arg ) : "" ); function_location( procedure, &frame->file, &frame->line ); print_source_line( frame ); - printf( "see definition of rule '%s' being called\n", frame->rulename ); + out_printf( "see definition of rule '%s' being called\n", frame->rulename ); backtrace( frame->prev ); - exit( 1 ); + exit( EXITBAD ); } static void type_check_range( OBJECT * type_name, LISTITER iter, LISTITER end, @@ -3232,9 +3321,9 @@ static void argument_compiler_add( struct argument_compiler * c, OBJECT * arg, if ( is_type_name( object_str( arg ) ) ) { - printf( "%s:%d: missing argument name before type name: %s\n", + err_printf( "%s:%d: missing argument name before type name: %s\n", object_str( file ), line, object_str( arg ) ); - exit( 1 ); + exit( EXITBAD ); } c->arg.arg_name = object_copy( arg ); @@ -3280,9 +3369,9 @@ static struct arg_list arg_compile_impl( struct argument_compiler * c, case ARGUMENT_COMPILER_DONE: break; case ARGUMENT_COMPILER_FOUND_TYPE: - printf( "%s:%d: missing argument name after type name: %s\n", + err_printf( "%s:%d: missing argument name after type name: %s\n", object_str( file ), line, object_str( c->arg.type_name ) ); - exit( 1 ); + exit( EXITBAD ); case ARGUMENT_COMPILER_FOUND_OBJECT: dynamic_array_push( c->args, c->arg ); break; @@ -3404,19 +3493,19 @@ static void argument_list_print( struct arg_list * args, int num_args ) for ( i = 0; i < num_args; ++i ) { int j; - if ( i ) printf( " : " ); + if ( i ) out_printf( " : " ); for ( j = 0; j < args[ i ].size; ++j ) { struct argument * formal_arg = &args[ i ].args[ j ]; - if ( j ) printf( " " ); + if ( j ) out_printf( " " ); if ( formal_arg->type_name ) - printf( "%s ", object_str( formal_arg->type_name ) ); - printf( "%s", object_str( formal_arg->arg_name ) ); + out_printf( "%s ", object_str( formal_arg->type_name ) ); + out_printf( "%s", object_str( formal_arg->arg_name ) ); switch ( formal_arg->flags ) { - case ARG_OPTIONAL: printf( " ?" ); break; - case ARG_PLUS: printf( " +" ); break; - case ARG_STAR: printf( " *" ); break; + case ARG_OPTIONAL: out_printf( " ?" 
); break; + case ARG_PLUS: out_printf( " +" ); break; + case ARG_STAR: out_printf( " *" ); break; } } } @@ -3960,6 +4049,13 @@ LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s ) break; } + case INSTR_FOR_POP: + { + stack_deallocate( s, sizeof( LISTITER ) ); + list_free( stack_pop( s ) ); + break; + } + /* * Switch */ @@ -4008,7 +4104,7 @@ LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s ) frame->file = function->file; frame->line = function->line; backtrace_line( frame ); - printf( "error: stack check failed.\n" ); + out_printf( "error: stack check failed.\n" ); backtrace( frame ); assert( saved_stack == s->data ); } @@ -4614,17 +4710,46 @@ LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s ) { int err_redir = strcmp( "STDERR", out ) == 0; string result[ 1 ]; + tmp_filename = path_tmpfile(); - string_new( result ); + + /* Construct os-specific cat command. */ + { + char * command = "cat"; + char * quote = "\""; + char * redirect = "1>&2"; + #ifdef OS_NT - string_append( result, "type \"" ); - #else - string_append( result, "cat \"" ); + command = "type"; + quote = "\""; + #elif defined( OS_VMS ) + command = "pipe type"; + quote = ""; + + /* Get tmp file name is os-format. */ + { + string os_filename[ 1 ]; + + string_new( os_filename ); + path_translate_to_os( object_str( tmp_filename ), os_filename ); + object_free( tmp_filename ); + tmp_filename = object_new( os_filename->value ); + string_free( os_filename ); + } #endif - string_append( result, object_str( tmp_filename ) ); - string_push_back( result, '\"' ); - if ( err_redir ) - string_append( result, " 1>&2" ); + + string_new( result ); + string_append( result, command ); + string_append( result, " " ); + string_append( result, quote ); + string_append( result, object_str( tmp_filename ) ); + string_append( result, quote ); + if ( err_redir ) + { + string_append( result, " " ); + string_append( result, redirect ); + } + } /* Replace STDXXX with the temporary file. 
*/ list_free( stack_pop( s ) ); @@ -4653,16 +4778,16 @@ LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s ) if ( !out_file ) { - printf( "failed to write output file '%s'!\n", + err_printf( "failed to write output file '%s'!\n", out_name->value ); exit( EXITBAD ); } string_free( out_name ); } - if ( out_debug ) printf( "\nfile %s\n", out ); + if ( out_debug ) out_printf( "\nfile %s\n", out ); if ( out_file ) fputs( buf->value, out_file ); - if ( out_debug ) fputs( buf->value, stdout ); + if ( out_debug ) out_puts( buf->value ); if ( out_file ) { fflush( out_file ); @@ -4672,7 +4797,7 @@ LIST * function_run( FUNCTION * function_, FRAME * frame, STACK * s ) if ( tmp_filename ) object_free( tmp_filename ); - if ( out_debug ) fputc( '\n', stdout ); + if ( out_debug ) out_putc( '\n' ); break; } @@ -4926,7 +5051,7 @@ static LIST * call_python_function( PYTHON_FUNCTION * function, FRAME * frame ) { OBJECT * s = python_to_string( PyList_GetItem( py_result, i ) ); if ( !s ) - fprintf( stderr, + err_printf( "Non-string object returned by Python call.\n" ); else result = list_push_back( result, s ); @@ -4957,7 +5082,7 @@ static LIST * call_python_function( PYTHON_FUNCTION * function, FRAME * frame ) else { PyErr_Print(); - fprintf( stderr, "Call failed\n" ); + err_printf( "Call failed\n" ); } return result; diff --git a/src/engine/hash.c b/src/engine/hash.c index 36f836668..772087a22 100644 --- a/src/engine/hash.c +++ b/src/engine/hash.c @@ -20,6 +20,7 @@ #include "hash.h" #include "compile.h" +#include "output.h" #include @@ -365,7 +366,7 @@ void hashstats_add( struct hashstats * stats, struct hash * hp ) void hashstats_print( struct hashstats * stats, char const * name ) { - printf( "%s table: %d+%d+%d (%dK+%luK+%luK) items+table+hash, %f density\n", + out_printf( "%s table: %d+%d+%d (%dK+%luK+%luK) items+table+hash, %f density\n", name, stats->count, stats->num_items, diff --git a/src/engine/hcache.c b/src/engine/hcache.c index 3cf15f776..d440d5c02 100644 --- a/src/engine/hcache.c +++ b/src/engine/hcache.c @@ -41,6 +41,7 @@ #include "search.h" #include "timestamp.h" #include "variable.h" +#include "output.h" typedef struct hcachedata HCACHEDATA ; @@ -218,7 +219,7 @@ void hcache_init() record_type = read_netstring( f ); if ( !record_type ) { - fprintf( stderr, "invalid %s\n", hcachename ); + err_printf( "invalid %s\n", hcachename ); goto cleanup; } if ( !strcmp( object_str( record_type ), CACHE_RECORD_END ) ) @@ -228,7 +229,7 @@ void hcache_init() } if ( strcmp( object_str( record_type ), CACHE_RECORD_HEADER ) ) { - fprintf( stderr, "invalid %s with record separator <%s>\n", + err_printf( "invalid %s with record separator <%s>\n", hcachename, record_type ? 
object_str( record_type ) : "" ); goto cleanup; } @@ -242,7 +243,7 @@ void hcache_init() if ( !cachedata.boundname || !time_secs_str || !time_nsecs_str || !age_str || !includes_count_str ) { - fprintf( stderr, "invalid %s\n", hcachename ); + err_printf( "invalid %s\n", hcachename ); goto cleanup; } @@ -256,7 +257,7 @@ void hcache_init() OBJECT * const s = read_netstring( f ); if ( !s ) { - fprintf( stderr, "invalid %s\n", hcachename ); + err_printf( "invalid %s\n", hcachename ); list_free( l ); goto cleanup; } @@ -267,7 +268,7 @@ void hcache_init() hdrscan_count_str = read_netstring( f ); if ( !hdrscan_count_str ) { - fprintf( stderr, "invalid %s\n", hcachename ); + err_printf( "invalid %s\n", hcachename ); goto cleanup; } @@ -277,7 +278,7 @@ void hcache_init() OBJECT * const s = read_netstring( f ); if ( !s ) { - fprintf( stderr, "invalid %s\n", hcachename ); + err_printf( "invalid %s\n", hcachename ); list_free( l ); goto cleanup; } @@ -297,7 +298,7 @@ void hcache_init() } else { - fprintf( stderr, "can not insert header cache item, bailing on %s" + err_printf( "can not insert header cache item, bailing on %s" "\n", hcachename ); goto cleanup; } @@ -332,7 +333,7 @@ cleanup: } if ( DEBUG_HEADER ) - printf( "hcache read from file %s\n", hcachename ); + out_printf( "hcache read from file %s\n", hcachename ); bail: if ( version ) @@ -406,7 +407,7 @@ void hcache_done() write_netstring( f, CACHE_RECORD_END ); if ( DEBUG_HEADER ) - printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n", + out_printf( "hcache written to %s. %d dependencies, %.0f%% hit rate\n", hcachename, header_count, queries ? 100.0 * hits / queries : 0 ); fclose ( f ); @@ -456,13 +457,13 @@ LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan ) { if ( DEBUG_HEADER ) { - printf( "HDRSCAN out of date in cache for %s\n", + out_printf( "HDRSCAN out of date in cache for %s\n", object_str( t->boundname ) ); - printf(" real : "); + out_printf(" real : "); list_print( hdrscan ); - printf( "\n cached: " ); + out_printf( "\n cached: " ); list_print( c->hdrscan ); - printf( "\n" ); + out_printf( "\n" ); } list_free( c->includes ); @@ -473,7 +474,7 @@ LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan ) else { if ( DEBUG_HEADER ) - printf( "using header cache for %s\n", object_str( + out_printf( "using header cache for %s\n", object_str( t->boundname ) ); c->age = 0; ++hits; @@ -483,7 +484,7 @@ LIST * hcache( TARGET * t, int rec, regexp * re[], LIST * hdrscan ) else { if ( DEBUG_HEADER ) - printf ("header cache out of date for %s\n", object_str( + out_printf ("header cache out of date for %s\n", object_str( t->boundname ) ); list_free( c->includes ); list_free( c->hdrscan ); diff --git a/src/engine/hdrmacro.c b/src/engine/hdrmacro.c index eb4fe90f4..a8be7787d 100644 --- a/src/engine/hdrmacro.c +++ b/src/engine/hdrmacro.c @@ -39,6 +39,7 @@ #include "strings.h" #include "subst.h" #include "variable.h" +#include "output.h" /* this type is used to store a dictionary of file header macros */ @@ -64,7 +65,7 @@ void macro_headers( TARGET * t ) char buf[ 1024 ]; if ( DEBUG_HEADER ) - printf( "macro header scan for %s\n", object_str( t->name ) ); + out_printf( "macro header scan for %s\n", object_str( t->name ) ); /* This regexp is used to detect lines of the form * "#define MACRO <....>" or "#define MACRO "....." 
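[Note: macro_headers(), whose debug output is merely rerouted in this hunk, records lines of the form "#define MACRO <file>" or "#define MACRO \"file\"" so that later header scans can resolve includes written through a macro. A small Jam sketch of how such a file is registered, using hypothetical file names and assuming the documented HDRMACRO builtin:

    # Suppose defs.h (hypothetical) contains:  #define MY_CONFIG "config.h"
    # and other sources then write:            #include MY_CONFIG
    # Registering the defining file lets the header scan resolve MY_CONFIG:
    HDRMACRO defs.h ;
]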
@@ -96,7 +97,7 @@ void macro_headers( TARGET * t ) ( (char *)re->endp[ 2 ] )[ 0 ] = '\0'; if ( DEBUG_HEADER ) - printf( "macro '%s' used to define filename '%s' in '%s'\n", + out_printf( "macro '%s' used to define filename '%s' in '%s'\n", re->startp[ 1 ], re->startp[ 2 ], object_str( t->boundname ) ); @@ -131,7 +132,7 @@ OBJECT * macro_header_get( OBJECT * macro_name ) header_macros_hash, macro_name ) ) ) { if ( DEBUG_HEADER ) - printf( "### macro '%s' evaluated to '%s'\n", object_str( macro_name + out_printf( "### macro '%s' evaluated to '%s'\n", object_str( macro_name ), object_str( v->filename ) ); return v->filename; } diff --git a/src/engine/headers.c b/src/engine/headers.c index 0d9558d5d..e653abcfa 100644 --- a/src/engine/headers.c +++ b/src/engine/headers.c @@ -35,6 +35,7 @@ #include "rules.h" #include "subst.h" #include "variable.h" +#include "output.h" #ifdef OPT_HEADER_CACHE_EXT # include "hcache.h" @@ -72,7 +73,7 @@ void headers( TARGET * t ) return; if ( DEBUG_HEADER ) - printf( "header scan %s\n", object_str( t->name ) ); + out_printf( "header scan %s\n", object_str( t->name ) ); /* Compile all regular expressions in HDRSCAN */ iter = list_begin( hdrscan ); @@ -127,8 +128,8 @@ LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] ) ++count; if ( ( ( count == 100 ) || !( count % 1000 ) ) && DEBUG_MAKE ) { - printf( "...patience...\n" ); - fflush( stdout ); + out_printf( "...patience...\n" ); + out_flush(); } #endif @@ -153,7 +154,7 @@ LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] ) { ( (char *)re[ i ]->endp[ 1 ] )[ 0 ] = '\0'; if ( DEBUG_HEADER ) - printf( "header found: %s\n", re[ i ]->startp[ 1 ] ); + out_printf( "header found: %s\n", re[ i ]->startp[ 1 ] ); l = list_push_back( l, object_new( re[ i ]->startp[ 1 ] ) ); } @@ -166,7 +167,7 @@ LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] ) ( (char *)re_macros->endp[ 1 ] )[ 0 ] = '\0'; if ( DEBUG_HEADER ) - printf( "macro header found: %s", re_macros->startp[ 1 ] ); + out_printf( "macro header found: %s", re_macros->startp[ 1 ] ); macro_name = object_new( re_macros->startp[ 1 ] ); header_filename = macro_header_get( macro_name ); @@ -174,14 +175,14 @@ LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] ) if ( header_filename ) { if ( DEBUG_HEADER ) - printf( " resolved to '%s'\n", object_str( header_filename ) + out_printf( " resolved to '%s'\n", object_str( header_filename ) ); l = list_push_back( l, object_copy( header_filename ) ); } else { if ( DEBUG_HEADER ) - printf( " ignored !!\n" ); + out_printf( " ignored !!\n" ); } } } @@ -193,5 +194,5 @@ LIST * headers1( LIST * l, OBJECT * file, int rec, regexp * re[] ) void regerror( char const * s ) { - printf( "re error %s\n", s ); + out_printf( "re error %s\n", s ); } diff --git a/src/engine/jam.c b/src/engine/jam.c index 52ef441d2..c0311fb04 100644 --- a/src/engine/jam.c +++ b/src/engine/jam.c @@ -166,6 +166,12 @@ static char * othersyms[] = { OSMAJOR, OSMINOR, OSPLAT, JAMVERSYM, 0 }; # endif #endif + +#ifdef OS_VMS +# define use_environ arg_environ +#endif + + /* on Win32-LCC */ #if defined( OS_NT ) && defined( __LCC__ ) # define use_environ _environ @@ -207,6 +213,7 @@ int anyhow = 0; extern PyObject * bjam_variable ( PyObject * self, PyObject * args ); extern PyObject * bjam_backtrace ( PyObject * self, PyObject * args ); extern PyObject * bjam_caller ( PyObject * self, PyObject * args ); + int python_optimize = 1; /* Set Python optimzation on by default */ #endif void regex_done(); @@ -215,23 +222,26 @@ char 
const * saved_argv0; static void usage( const char * progname ) { - printf( "\nusage: %s [ options ] targets...\n\n", progname ); + err_printf("\nusage: %s [ options ] targets...\n\n", progname); - printf( "-a Build all targets, even if they are current.\n" ); - printf( "-dx Set the debug level to x (0-9).\n" ); - printf( "-fx Read x instead of Jambase.\n" ); - /* printf( "-g Build from newest sources first.\n" ); */ - printf( "-jx Run up to x shell commands concurrently.\n" ); - printf( "-lx Limit actions to x number of seconds after which they are stopped.\n" ); - printf( "-mx Maximum target output saved (kb), default is to save all output.\n" ); - printf( "-n Don't actually execute the updating actions.\n" ); - printf( "-ox Write the updating actions to file x.\n" ); - printf( "-px x=0, pipes action stdout and stderr merged into action output.\n" ); - printf( "-q Quit quickly as soon as a target fails.\n" ); - printf( "-sx=y Set variable x=y, overriding environment.\n" ); - printf( "-tx Rebuild x, even if it is up-to-date.\n" ); - printf( "-v Print the version of jam and exit.\n" ); - printf( "--x Option is ignored.\n\n" ); + err_printf("-a Build all targets, even if they are current.\n"); + err_printf("-dx Set the debug level to x (0-9).\n"); + err_printf("-fx Read x instead of Jambase.\n"); + /* err_printf( "-g Build from newest sources first.\n" ); */ + err_printf("-jx Run up to x shell commands concurrently.\n"); + err_printf("-lx Limit actions to x number of seconds after which they are stopped.\n"); + err_printf("-mx Maximum target output saved (kb), default is to save all output.\n"); + err_printf("-n Don't actually execute the updating actions.\n"); + err_printf("-ox Mirror all output to file x.\n"); + err_printf("-px x=0, pipes action stdout and stderr merged into action output.\n"); + err_printf("-q Quit quickly as soon as a target fails.\n"); + err_printf("-sx=y Set variable x=y, overriding environment.\n"); + err_printf("-tx Rebuild x, even if it is up-to-date.\n"); + err_printf("-v Print the version of jam and exit.\n"); +#ifdef HAVE_PYTHON + err_printf("-z Disable Python Optimization and enable asserts\n"); +#endif + err_printf("--x Option is ignored.\n\n"); exit( EXITBAD ); } @@ -324,7 +334,13 @@ int main( int argc, char * * argv, char * * arg_environ ) --argc; ++argv; - if ( getoptions( argc, argv, "-:l:m:d:j:p:f:gs:t:ano:qv", optv ) < 0 ) + #ifdef HAVE_PYTHON + #define OPTSTRING "-:l:m:d:j:p:f:gs:t:ano:qvz" + #else + #define OPTSTRING "-:l:m:d:j:p:f:gs:t:ano:qv" + #endif + + if ( getoptions( argc, argv, OPTSTRING, optv ) < 0 ) { usage( progname ); } @@ -332,13 +348,13 @@ int main( int argc, char * * argv, char * * arg_environ ) /* Version info. */ if ( ( s = getoptval( optv, 'v', 0 ) ) ) { - printf( "Boost.Jam Version %s. %s.\n", VERSION, OSMINOR ); - printf( " Copyright 1993-2002 Christopher Seiwald and Perforce " + out_printf( "Boost.Jam Version %s. 
%s.\n", VERSION, OSMINOR ); + out_printf( " Copyright 1993-2002 Christopher Seiwald and Perforce " "Software, Inc.\n" ); - printf( " Copyright 2001 David Turner.\n" ); - printf( " Copyright 2001-2004 David Abrahams.\n" ); - printf( " Copyright 2002-2008 Rene Rivera.\n" ); - printf( " Copyright 2003-2008 Vladimir Prus.\n" ); + out_printf( " Copyright 2001 David Turner.\n" ); + out_printf( " Copyright 2001-2004 David Abrahams.\n" ); + out_printf( " Copyright 2002-2015 Rene Rivera.\n" ); + out_printf( " Copyright 2003-2015 Vladimir Prus.\n" ); return EXITOK; } @@ -357,7 +373,7 @@ int main( int argc, char * * argv, char * * arg_environ ) globs.pipe_action = atoi( s ); if ( globs.pipe_action < 0 || 3 < globs.pipe_action ) { - printf( "Invalid pipe descriptor '%d', valid values are -p[0..3]." + err_printf( "Invalid pipe descriptor '%d', valid values are -p[0..3]." "\n", globs.pipe_action ); exit( EXITBAD ); } @@ -374,7 +390,7 @@ int main( int argc, char * * argv, char * * arg_environ ) globs.jobs = atoi( s ); if ( globs.jobs < 1 || globs.jobs > MAXJOBS ) { - printf( "Invalid value for the '-j' option, valid values are 1 " + err_printf( "Invalid value for the '-j' option, valid values are 1 " "through %d.\n", MAXJOBS ); exit( EXITBAD ); } @@ -389,6 +405,11 @@ int main( int argc, char * * argv, char * * arg_environ ) if ( ( s = getoptval( optv, 'm', 0 ) ) ) globs.max_buf = atoi( s ) * 1024; /* convert to kb */ + #ifdef HAVE_PYTHON + if ( ( s = getoptval( optv, 'z', 0 ) ) ) + python_optimize = 0; /* disable python optimization */ + #endif + /* Turn on/off debugging */ for ( n = 0; ( s = getoptval( optv, 'd', n ) ); ++n ) { @@ -403,7 +424,7 @@ int main( int argc, char * * argv, char * * arg_environ ) if ( ( i < 0 ) || ( i >= DEBUG_MAX ) ) { - printf( "Invalid debug level '%s'.\n", s ); + out_printf( "Invalid debug level '%s'.\n", s ); continue; } @@ -415,6 +436,17 @@ int main( int argc, char * * argv, char * * arg_environ ) globs.debug[ i-- ] = 1; } + /* If an output file is specified, set globs.out to that. */ + if ( ( s = getoptval( optv, 'o', 0 ) ) ) + { + if ( !( globs.out = fopen( s, "w" ) ) ) + { + err_printf( "Failed to write to '%s'\n", s ); + exit( EXITBAD ); + } + /* ++globs.noexec; */ + } + constants_init(); { @@ -423,6 +455,7 @@ int main( int argc, char * * argv, char * * arg_environ ) #ifdef HAVE_PYTHON { PROFILE_ENTER( MAIN_PYTHON ); + Py_OptimizeFlag = python_optimize; Py_Initialize(); { static PyMethodDef BjamMethods[] = { @@ -583,17 +616,6 @@ int main( int argc, char * * argv, char * * arg_environ ) object_free( target ); } - /* If an output file is specified, set globs.cmdout to that. */ - if ( ( s = getoptval( optv, 'o', 0 ) ) ) - { - if ( !( globs.cmdout = fopen( s, "w" ) ) ) - { - printf( "Failed to write to '%s'\n", s ); - exit( EXITBAD ); - } - ++globs.noexec; - } - /* The build system may set the PARALLELISM variable to override -j * options. */ @@ -603,7 +625,7 @@ int main( int argc, char * * argv, char * * arg_environ ) { int const j = atoi( object_str( list_front( p ) ) ); if ( j < 1 || j > MAXJOBS ) - printf( "Invalid value of PARALLELISM: %s. Valid values " + out_printf( "Invalid value of PARALLELISM: %s. Valid values " "are 1 through %d.\n", object_str( list_front( p ) ), MAXJOBS ); else @@ -658,9 +680,9 @@ int main( int argc, char * * argv, char * * arg_environ ) constants_done(); object_done(); - /* Close cmdout. */ - if ( globs.cmdout ) - fclose( globs.cmdout ); + /* Close log out. 
*/ + if ( globs.out ) + fclose( globs.out ); #ifdef HAVE_PYTHON Py_Finalize(); @@ -718,6 +740,24 @@ char * executable_path( char const * argv0 ) ssize_t const ret = readlink( "/proc/self/exe", buf, sizeof( buf ) ); return ( !ret || ret == sizeof( buf ) ) ? NULL : strndup( buf, ret ); } +#elif defined(OS_VMS) +# include +char * executable_path( char const * argv0 ) +{ + char * vms_path = NULL; + char * posix_path = NULL; + char * p; + + /* On VMS argv[0] shows absolute path to the image file. + * So, just remove VMS file version and translate path to POSIX-style. + */ + vms_path = strdup( argv0 ); + if ( vms_path && ( p = strchr( vms_path, ';') ) ) *p = '\0'; + posix_path = decc$translate_vms( vms_path ); + if ( vms_path ) free( vms_path ); + + return posix_path > 0 ? strdup( posix_path ) : NULL; +} #else char * executable_path( char const * argv0 ) { diff --git a/src/engine/jam.h b/src/engine/jam.h index 2895906af..ec56fe29b 100644 --- a/src/engine/jam.h +++ b/src/engine/jam.h @@ -25,6 +25,43 @@ /* Assume popen support is available unless known otherwise. */ #define HAVE_POPEN 1 +/* + * VMS, OPENVMS + */ + +#ifdef VMS + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define OSMINOR "OS=VMS" +#define OSMAJOR "VMS=true" +#define OS_VMS +#define MAXLINE 1024 /* longest 'together' actions */ +#define PATH_DELIM '/' /* use CRTL POSIX-style handling */ +#define SPLITPATH ',' +#define EXITOK EXIT_SUCCESS +#define EXITBAD EXIT_FAILURE +#define DOWNSHIFT_PATHS + +/* This may be inaccurate. */ +#ifndef __DECC +#define OSPLAT "OSPLAT=VAX" +#endif + +#define glob jam_glob /* use jam's glob, not CRTL's */ + +#endif + /* * Windows NT */ @@ -431,7 +468,7 @@ struct globs int newestfirst; /* build newest sources first */ int pipe_action; char debug[ DEBUG_MAX ]; - FILE * cmdout; /* print cmds, not run them */ + FILE * out; /* mirror output here */ long timeout; /* number of seconds to limit actions to, * default 0 for no limit. */ diff --git a/src/engine/jambase.c b/src/engine/jambase.c index 323ba1564..80a7e12de 100644 --- a/src/engine/jambase.c +++ b/src/engine/jambase.c @@ -464,8 +464,8 @@ char *jambase[] = { "CHMOD ?= set file/prot= ;\n", "CP ?= copy/replace ;\n", "CRELIB ?= true ;\n", -"DOT ?= [] ;\n", -"DOTDOT ?= [-] ;\n", +"DOT ?= . ; ## Use POSIX CRTL\n", +"DOTDOT ?= .. ; ## Use POSIX CRTL\n", "EXEMODE ?= (w:e) ;\n", "FILEMODE ?= (w:r) ;\n", "HDRS ?= ;\n", @@ -473,12 +473,12 @@ char *jambase[] = { "LINKFLAGS ?= \"\" ;\n", "LINKLIBS ?= ;\n", "MKDIR ?= create/dir ;\n", -"MV ?= rename ;\n", +"MV ?= rename /noconf ;\n", "OPTIM ?= \"\" ;\n", -"RM ?= delete ;\n", +"RM ?= delete /noconf ;\n", "RUNVMS ?= mcr ;\n", "SHELLMODE ?= (w:er) ;\n", -"SLASH ?= . 
;\n", +"SLASH ?= / ; ## Use POSIX CRTL\n", "STDHDRS ?= decc$library_include ;\n", "SUFEXE ?= .exe ;\n", "SUFLIB ?= .olb ;\n", @@ -740,7 +740,7 @@ char *jambase[] = { "_h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;\n", "if $(VMS) && $(_h)\n", "{\n", -"SLASHINC on $(<) = \"/inc=(\" $(_h[1]) ,$(_h[2-]) \")\" ;\n", +"SLASHINC on $(<) = \"/inc=(\" \\\"$(_h[1])\\\" ,\\\"$(_h[2-])\\\" \")\" ;\n", "}\n", "else if $(MAC) && $(_h)\n", "{\n", @@ -765,7 +765,7 @@ char *jambase[] = { "_h = $(SEARCH_SOURCE) $(HDRS) $(SUBDIRHDRS) ;\n", "if $(VMS) && $(_h)\n", "{\n", -"SLASHINC on $(<) = \"/inc=(\" $(_h[1]) ,$(_h[2-]) \")\" ;\n", +"SLASHINC on $(<) = \"/inc=(\" \\\"$(_h[1])\\\" ,\\\"$(_h[2-])\\\" \")\" ;\n", "}\n", "else if $(MAC) && $(_h)\n", "{\n", @@ -1239,19 +1239,6 @@ char *jambase[] = { "{\n", "_s = $(DOT) ;\n", "}\n", -"else if $(VMS)\n", -"{\n", -"switch $(<[1])\n", -"{\n", -"case *:* : _s = $(<[1]) ;\n", -"case \\\\[*\\\\] : _s = $(<[1]) ;\n", -"case * : _s = [.$(<[1])] ;\n", -"}\n", -"for _i in [.$(<[2-])]\n", -"{\n", -"_s = $(_i:R=$(_s)) ;\n", -"}\n", -"}\n", "else if $(MAC)\n", "{\n", "_s = $(DOT) ;\n", @@ -1621,39 +1608,52 @@ char *jambase[] = { "{\n", "actions updated together piecemeal Archive\n", "{\n", -"lib/replace $(<) $(>[1]) ,$(>[2-])\n", +"lib/replace $(<:W) $(>[1]:W) ,$(>[2-]:W)\n", "}\n", "actions Cc\n", "{\n", -"$(CC)/obj=$(<) $(CCFLAGS) $(OPTIM) $(SLASHINC) $(>)\n", +"$(CC)/obj=$(<:W) $(CCFLAGS) $(OPTIM) $(SLASHINC) $(>:W)\n", "}\n", "actions C++\n", "{\n", -"$(C++)/obj=$(<) $(C++FLAGS) $(OPTIM) $(SLASHINC) $(>)\n", +"$(C++)/obj=$(<:W) $(C++FLAGS) $(OPTIM) $(SLASHINC) $(>:W)\n", "}\n", "actions piecemeal together existing Clean\n", "{\n", -"$(RM) $(>[1]);* ,$(>[2-]);*\n", +"$(RM) $(>[1]:W);* ,$(>[2-]:W);*\n", "}\n", "actions together quietly CreLib\n", "{\n", -"if f$search(\"$(<)\") .eqs. \"\" then lib/create $(<)\n", +"if f$search(\"$(<:W)\") .eqs. \"\" then lib/create $(<:W)\n", "}\n", "actions GenFile1\n", "{\n", -"mcr $(>[1]) $(<) $(>[2-])\n", +"mcr $(>[1]:W) $(<:W) $(>[2-]:W)\n", +"}\n", +"actions MkDir1\n", +"{\n", +"$(MKDIR) $(<:W)\n", +"}\n", +"actions Yacc1\n", +"{\n", +"$(YACC) $(YACCFLAGS) $(>:W)\n", +"}\n", +"actions YaccMv\n", +"{\n", +"$(MV) $(YACCFILES).c $(<[1]:W)\n", +"$(MV) $(YACCFILES).h $(<[2]:W)\n", "}\n", "actions Link bind NEEDLIBS\n", "{\n", -"$(LINK)/exe=$(<) $(LINKFLAGS) $(>[1]) ,$(>[2-]) ,$(NEEDLIBS)/lib ,$(LINKLIBS)\n", +"$(LINK)/exe=$(<:W) $(LINKFLAGS) $(>[1]:W) ,$(>[2-]:W) ,$(NEEDLIBS:W)/lib ,$(LINKLIBS:W)\n", "}\n", "actions quietly updated piecemeal together RmTemps\n", "{\n", -"$(RM) $(>[1]);* ,$(>[2-]);*\n", +"$(RM) $(>[1]:W);* ,$(>[2-]:W);*\n", "}\n", "actions Shell\n", "{\n", -"$(CP) $(>) $(<)\n", +"$(CP) $(>:W) $(<:W)\n", "}\n", "}\n", "else if $(MAC)\n", diff --git a/src/engine/jamgram.c b/src/engine/jamgram.c index 48c85228e..916b08d3e 100644 --- a/src/engine/jamgram.c +++ b/src/engine/jamgram.c @@ -1,9 +1,8 @@ -/* A Bison parser, made by GNU Bison 2.4.3. */ +/* A Bison parser, made by GNU Bison 2.6.4. */ -/* Skeleton implementation for Bison's Yacc-like parsers in C +/* Bison implementation for Yacc-like parsers in C - Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, - 2009, 2010 Free Software Foundation, Inc. + Copyright (C) 1984, 1989-1990, 2000-2012 Free Software Foundation, Inc. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by @@ -45,7 +44,7 @@ #define YYBISON 1 /* Bison version. 
*/ -#define YYBISON_VERSION "2.4.3" +#define YYBISON_VERSION "2.6.4" /* Skeleton name. */ #define YYSKELETON_NAME "yacc.c" @@ -59,15 +58,12 @@ /* Pull parsers. */ #define YYPULL 1 -/* Using locations. */ -#define YYLSP_NEEDED 0 /* Copy the first part of user declarations. */ - -/* Line 189 of yacc.c */ -#line 96 "jamgram.y" +/* Line 358 of yacc.c */ +#line 98 "jamgram.y" #include "jam.h" @@ -103,19 +99,24 @@ # define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f ) # define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 ) # define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 ) +# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 ) +# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 ) +# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 ) # define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 ) # define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 ) +/* Line 358 of yacc.c */ +#line 112 "y.tab.c" -/* Line 189 of yacc.c */ -#line 114 "y.tab.c" - -/* Enabling traces. */ -#ifndef YYDEBUG -# define YYDEBUG 0 -#endif +# ifndef YY_NULL +# if defined __cplusplus && 201103L <= __cplusplus +# define YY_NULL nullptr +# else +# define YY_NULL 0 +# endif +# endif /* Enabling verbose error messages. */ #ifdef YYERROR_VERBOSE @@ -125,11 +126,17 @@ # define YYERROR_VERBOSE 0 #endif -/* Enabling the token table. */ -#ifndef YYTOKEN_TABLE -# define YYTOKEN_TABLE 0 +/* In a future release of Bison, this section will be replaced + by #include "y.tab.h". */ +#ifndef YY_YY_Y_TAB_H_INCLUDED +# define YY_YY_Y_TAB_H_INCLUDED +/* Enabling traces. */ +#ifndef YYDEBUG +# define YYDEBUG 1 +#endif +#if YYDEBUG +extern int yydebug; #endif - /* Tokens. */ #ifndef YYTOKENTYPE @@ -156,33 +163,35 @@ _RBRACKET_t = 274, ACTIONS_t = 275, BIND_t = 276, - CASE_t = 277, - CLASS_t = 278, - DEFAULT_t = 279, - ELSE_t = 280, - EXISTING_t = 281, - FOR_t = 282, - IF_t = 283, - IGNORE_t = 284, - IN_t = 285, - INCLUDE_t = 286, - LOCAL_t = 287, - MODULE_t = 288, - ON_t = 289, - PIECEMEAL_t = 290, - QUIETLY_t = 291, - RETURN_t = 292, - RULE_t = 293, - SWITCH_t = 294, - TOGETHER_t = 295, - UPDATED_t = 296, - WHILE_t = 297, - _LBRACE_t = 298, - _BAR_t = 299, - _BARBAR_t = 300, - _RBRACE_t = 301, - ARG = 302, - STRING = 303 + BREAK_t = 277, + CASE_t = 278, + CLASS_t = 279, + CONTINUE_t = 280, + DEFAULT_t = 281, + ELSE_t = 282, + EXISTING_t = 283, + FOR_t = 284, + IF_t = 285, + IGNORE_t = 286, + IN_t = 287, + INCLUDE_t = 288, + LOCAL_t = 289, + MODULE_t = 290, + ON_t = 291, + PIECEMEAL_t = 292, + QUIETLY_t = 293, + RETURN_t = 294, + RULE_t = 295, + SWITCH_t = 296, + TOGETHER_t = 297, + UPDATED_t = 298, + WHILE_t = 299, + _LBRACE_t = 300, + _BAR_t = 301, + _BARBAR_t = 302, + _RBRACE_t = 303, + ARG = 304, + STRING = 305 }; #endif /* Tokens. 
*/ @@ -205,34 +214,35 @@ #define _RBRACKET_t 274 #define ACTIONS_t 275 #define BIND_t 276 -#define CASE_t 277 -#define CLASS_t 278 -#define DEFAULT_t 279 -#define ELSE_t 280 -#define EXISTING_t 281 -#define FOR_t 282 -#define IF_t 283 -#define IGNORE_t 284 -#define IN_t 285 -#define INCLUDE_t 286 -#define LOCAL_t 287 -#define MODULE_t 288 -#define ON_t 289 -#define PIECEMEAL_t 290 -#define QUIETLY_t 291 -#define RETURN_t 292 -#define RULE_t 293 -#define SWITCH_t 294 -#define TOGETHER_t 295 -#define UPDATED_t 296 -#define WHILE_t 297 -#define _LBRACE_t 298 -#define _BAR_t 299 -#define _BARBAR_t 300 -#define _RBRACE_t 301 -#define ARG 302 -#define STRING 303 - +#define BREAK_t 277 +#define CASE_t 278 +#define CLASS_t 279 +#define CONTINUE_t 280 +#define DEFAULT_t 281 +#define ELSE_t 282 +#define EXISTING_t 283 +#define FOR_t 284 +#define IF_t 285 +#define IGNORE_t 286 +#define IN_t 287 +#define INCLUDE_t 288 +#define LOCAL_t 289 +#define MODULE_t 290 +#define ON_t 291 +#define PIECEMEAL_t 292 +#define QUIETLY_t 293 +#define RETURN_t 294 +#define RULE_t 295 +#define SWITCH_t 296 +#define TOGETHER_t 297 +#define UPDATED_t 298 +#define WHILE_t 299 +#define _LBRACE_t 300 +#define _BAR_t 301 +#define _BARBAR_t 302 +#define _RBRACE_t 303 +#define ARG 304 +#define STRING 305 @@ -243,12 +253,28 @@ typedef int YYSTYPE; # define YYSTYPE_IS_DECLARED 1 #endif +extern YYSTYPE yylval; + +#ifdef YYPARSE_PARAM +#if defined __STDC__ || defined __cplusplus +int yyparse (void *YYPARSE_PARAM); +#else +int yyparse (); +#endif +#else /* ! YYPARSE_PARAM */ +#if defined __STDC__ || defined __cplusplus +int yyparse (void); +#else +int yyparse (); +#endif +#endif /* ! YYPARSE_PARAM */ + +#endif /* !YY_YY_Y_TAB_H_INCLUDED */ /* Copy the second part of user declarations. */ - -/* Line 264 of yacc.c */ -#line 252 "y.tab.c" +/* Line 377 of yacc.c */ +#line 278 "y.tab.c" #ifdef short # undef short @@ -301,24 +327,24 @@ typedef short int yytype_int16; # if defined YYENABLE_NLS && YYENABLE_NLS # if ENABLE_NLS # include /* INFRINGES ON USER NAME SPACE */ -# define YY_(msgid) dgettext ("bison-runtime", msgid) +# define YY_(Msgid) dgettext ("bison-runtime", Msgid) # endif # endif # ifndef YY_ -# define YY_(msgid) msgid +# define YY_(Msgid) Msgid # endif #endif /* Suppress unused-variable warnings by "using" E. */ #if ! defined lint || defined __GNUC__ -# define YYUSE(e) ((void) (e)) +# define YYUSE(E) ((void) (E)) #else -# define YYUSE(e) /* empty */ +# define YYUSE(E) /* empty */ #endif /* Identity function, used to suppress warnings about constant conditions. */ #ifndef lint -# define YYID(n) (n) +# define YYID(N) (N) #else #if (defined __STDC__ || defined __C99__FUNC__ \ || defined __cplusplus || defined _MSC_VER) @@ -351,11 +377,12 @@ YYID (yyi) # define alloca _alloca # else # define YYSTACK_ALLOC alloca -# if ! defined _ALLOCA_H && ! defined _STDLIB_H && (defined __STDC__ || defined __C99__FUNC__ \ +# if ! defined _ALLOCA_H && ! defined EXIT_SUCCESS && (defined __STDC__ || defined __C99__FUNC__ \ || defined __cplusplus || defined _MSC_VER) # include /* INFRINGES ON USER NAME SPACE */ -# ifndef _STDLIB_H -# define _STDLIB_H 1 + /* Use EXIT_SUCCESS as a witness for stdlib.h. */ +# ifndef EXIT_SUCCESS +# define EXIT_SUCCESS 0 # endif # endif # endif @@ -378,24 +405,24 @@ YYID (yyi) # ifndef YYSTACK_ALLOC_MAXIMUM # define YYSTACK_ALLOC_MAXIMUM YYSIZE_MAXIMUM # endif -# if (defined __cplusplus && ! defined _STDLIB_H \ +# if (defined __cplusplus && ! defined EXIT_SUCCESS \ && ! 
((defined YYMALLOC || defined malloc) \ && (defined YYFREE || defined free))) # include /* INFRINGES ON USER NAME SPACE */ -# ifndef _STDLIB_H -# define _STDLIB_H 1 +# ifndef EXIT_SUCCESS +# define EXIT_SUCCESS 0 # endif # endif # ifndef YYMALLOC # define YYMALLOC malloc -# if ! defined malloc && ! defined _STDLIB_H && (defined __STDC__ || defined __C99__FUNC__ \ +# if ! defined malloc && ! defined EXIT_SUCCESS && (defined __STDC__ || defined __C99__FUNC__ \ || defined __cplusplus || defined _MSC_VER) void *malloc (YYSIZE_T); /* INFRINGES ON USER NAME SPACE */ # endif # endif # ifndef YYFREE # define YYFREE free -# if ! defined free && ! defined _STDLIB_H && (defined __STDC__ || defined __C99__FUNC__ \ +# if ! defined free && ! defined EXIT_SUCCESS && (defined __STDC__ || defined __C99__FUNC__ \ || defined __cplusplus || defined _MSC_VER) void free (void *); /* INFRINGES ON USER NAME SPACE */ # endif @@ -424,23 +451,7 @@ union yyalloc ((N) * (sizeof (yytype_int16) + sizeof (YYSTYPE)) \ + YYSTACK_GAP_MAXIMUM) -/* Copy COUNT objects from FROM to TO. The source and destination do - not overlap. */ -# ifndef YYCOPY -# if defined __GNUC__ && 1 < __GNUC__ -# define YYCOPY(To, From, Count) \ - __builtin_memcpy (To, From, (Count) * sizeof (*(From))) -# else -# define YYCOPY(To, From, Count) \ - do \ - { \ - YYSIZE_T yyi; \ - for (yyi = 0; yyi < (Count); yyi++) \ - (To)[yyi] = (From)[yyi]; \ - } \ - while (YYID (0)) -# endif -# endif +# define YYCOPY_NEEDED 1 /* Relocate STACK from its old location to the new one. The local variables YYSIZE and YYSTACKSIZE give the old and new number of @@ -460,23 +471,43 @@ union yyalloc #endif +#if defined YYCOPY_NEEDED && YYCOPY_NEEDED +/* Copy COUNT objects from SRC to DST. The source and destination do + not overlap. */ +# ifndef YYCOPY +# if defined __GNUC__ && 1 < __GNUC__ +# define YYCOPY(Dst, Src, Count) \ + __builtin_memcpy (Dst, Src, (Count) * sizeof (*(Src))) +# else +# define YYCOPY(Dst, Src, Count) \ + do \ + { \ + YYSIZE_T yyi; \ + for (yyi = 0; yyi < (Count); yyi++) \ + (Dst)[yyi] = (Src)[yyi]; \ + } \ + while (YYID (0)) +# endif +# endif +#endif /* !YYCOPY_NEEDED */ + /* YYFINAL -- State number of the termination state. */ -#define YYFINAL 43 +#define YYFINAL 47 /* YYLAST -- Last index in YYTABLE. */ -#define YYLAST 243 +#define YYLAST 238 /* YYNTOKENS -- Number of terminals. */ -#define YYNTOKENS 49 +#define YYNTOKENS 51 /* YYNNTS -- Number of nonterminals. */ #define YYNNTS 24 /* YYNRULES -- Number of rules. */ -#define YYNRULES 75 +#define YYNRULES 77 /* YYNRULES -- Number of states. */ -#define YYNSTATES 159 +#define YYNSTATES 163 /* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */ #define YYUNDEFTOK 2 -#define YYMAXUTOK 303 +#define YYMAXUTOK 305 #define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) @@ -514,70 +545,71 @@ static const yytype_uint8 yytranslate[] = 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, - 45, 46, 47, 48 + 45, 46, 47, 48, 49, 50 }; #if YYDEBUG /* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in YYRHS. 
*/ -static const yytype_uint8 yyprhs[] = +static const yytype_uint16 yyprhs[] = { 0, 0, 3, 4, 6, 8, 10, 12, 15, 21, 22, 25, 27, 31, 32, 34, 35, 39, 43, 47, - 52, 59, 63, 72, 78, 84, 90, 96, 102, 110, - 116, 120, 121, 122, 132, 134, 136, 138, 141, 143, - 147, 151, 155, 159, 163, 167, 171, 175, 179, 183, - 187, 190, 194, 195, 198, 203, 205, 209, 211, 212, - 215, 217, 218, 223, 226, 231, 236, 237, 240, 242, - 244, 246, 248, 250, 252, 253 + 52, 59, 63, 66, 69, 78, 84, 90, 96, 102, + 108, 116, 122, 126, 127, 128, 138, 140, 142, 144, + 147, 149, 153, 157, 161, 165, 169, 173, 177, 181, + 185, 189, 193, 196, 200, 201, 204, 209, 211, 215, + 217, 218, 221, 223, 224, 229, 232, 237, 242, 243, + 246, 248, 250, 252, 254, 256, 258, 259 }; /* YYRHS -- A `-1'-separated list of the rules' RHS. */ static const yytype_int8 yyrhs[] = { - 50, 0, -1, -1, 52, -1, 53, -1, 52, -1, - 57, -1, 57, 52, -1, 32, 65, 54, 11, 51, - -1, -1, 14, 65, -1, 53, -1, 7, 64, 8, - -1, -1, 32, -1, -1, 43, 51, 46, -1, 31, - 65, 11, -1, 47, 64, 11, -1, 67, 60, 65, - 11, -1, 67, 34, 65, 60, 65, 11, -1, 37, - 65, 11, -1, 27, 56, 47, 30, 65, 43, 51, - 46, -1, 39, 65, 43, 62, 46, -1, 28, 61, - 43, 51, 46, -1, 33, 65, 43, 51, 46, -1, - 23, 64, 43, 51, 46, -1, 42, 61, 43, 51, - 46, -1, 28, 61, 43, 51, 46, 25, 57, -1, - 56, 38, 47, 55, 57, -1, 34, 67, 57, -1, - -1, -1, 20, 70, 47, 72, 43, 58, 48, 59, - 46, -1, 14, -1, 9, -1, 17, -1, 24, 14, - -1, 67, -1, 61, 14, 61, -1, 61, 4, 61, - -1, 61, 12, 61, -1, 61, 13, 61, -1, 61, - 15, 61, -1, 61, 16, 61, -1, 61, 5, 61, - -1, 61, 6, 61, -1, 61, 44, 61, -1, 61, - 45, 61, -1, 67, 30, 65, -1, 3, 61, -1, - 7, 61, 8, -1, -1, 63, 62, -1, 22, 47, - 10, 51, -1, 65, -1, 65, 10, 64, -1, 66, - -1, -1, 66, 67, -1, 47, -1, -1, 18, 68, - 69, 19, -1, 47, 64, -1, 34, 67, 47, 64, - -1, 34, 67, 37, 65, -1, -1, 70, 71, -1, - 41, -1, 40, -1, 29, -1, 36, -1, 35, -1, - 26, -1, -1, 21, 65, -1 + 52, 0, -1, -1, 54, -1, 55, -1, 54, -1, + 59, -1, 59, 54, -1, 34, 67, 56, 11, 53, + -1, -1, 14, 67, -1, 55, -1, 7, 66, 8, + -1, -1, 34, -1, -1, 45, 53, 48, -1, 33, + 67, 11, -1, 49, 66, 11, -1, 69, 62, 67, + 11, -1, 69, 36, 67, 62, 67, 11, -1, 39, + 67, 11, -1, 22, 11, -1, 25, 11, -1, 29, + 58, 49, 32, 67, 45, 53, 48, -1, 41, 67, + 45, 64, 48, -1, 30, 63, 45, 53, 48, -1, + 35, 67, 45, 53, 48, -1, 24, 66, 45, 53, + 48, -1, 44, 63, 45, 53, 48, -1, 30, 63, + 45, 53, 48, 27, 59, -1, 58, 40, 49, 57, + 59, -1, 36, 69, 59, -1, -1, -1, 20, 72, + 49, 74, 45, 60, 50, 61, 48, -1, 14, -1, + 9, -1, 17, -1, 26, 14, -1, 69, -1, 63, + 14, 63, -1, 63, 4, 63, -1, 63, 12, 63, + -1, 63, 13, 63, -1, 63, 15, 63, -1, 63, + 16, 63, -1, 63, 5, 63, -1, 63, 6, 63, + -1, 63, 46, 63, -1, 63, 47, 63, -1, 69, + 32, 67, -1, 3, 63, -1, 7, 63, 8, -1, + -1, 65, 64, -1, 23, 49, 10, 53, -1, 67, + -1, 67, 10, 66, -1, 68, -1, -1, 68, 69, + -1, 49, -1, -1, 18, 70, 71, 19, -1, 49, + 66, -1, 36, 69, 49, 66, -1, 36, 69, 39, + 67, -1, -1, 72, 73, -1, 43, -1, 42, -1, + 31, -1, 38, -1, 37, -1, 28, -1, -1, 21, + 67, -1 }; /* YYRLINE[YYN] -- source line where rule number YYN was defined. 
*/ static const yytype_uint16 yyrline[] = { - 0, 139, 139, 141, 152, 154, 158, 160, 162, 167, - 170, 172, 176, 179, 182, 185, 188, 190, 192, 194, - 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, - 216, 219, 221, 218, 230, 232, 234, 236, 243, 245, - 247, 249, 251, 253, 255, 257, 259, 261, 263, 265, - 267, 269, 281, 282, 286, 295, 297, 307, 312, 313, - 317, 319, 319, 328, 330, 332, 343, 344, 348, 350, - 352, 354, 356, 358, 368, 369 + 0, 144, 144, 146, 157, 159, 163, 165, 167, 172, + 175, 177, 181, 184, 187, 190, 193, 195, 197, 199, + 201, 203, 205, 207, 209, 211, 213, 215, 217, 219, + 221, 223, 225, 228, 230, 227, 239, 241, 243, 245, + 252, 254, 256, 258, 260, 262, 264, 266, 268, 270, + 272, 274, 276, 278, 290, 291, 295, 304, 306, 316, + 321, 322, 326, 328, 328, 337, 339, 341, 352, 353, + 357, 359, 361, 363, 365, 367, 377, 378 }; #endif -#if YYDEBUG || YYERROR_VERBOSE || YYTOKEN_TABLE +#if YYDEBUG || YYERROR_VERBOSE || 0 /* YYTNAME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. First, the terminals, then, starting at YYNTOKENS, nonterminals. */ static const char *const yytname[] = @@ -586,14 +618,15 @@ static const char *const yytname[] = "_AMPERAMPER_t", "_LPAREN_t", "_RPAREN_t", "_PLUS_EQUALS_t", "_COLON_t", "_SEMIC_t", "_LANGLE_t", "_LANGLE_EQUALS_t", "_EQUALS_t", "_RANGLE_t", "_RANGLE_EQUALS_t", "_QUESTION_EQUALS_t", "_LBRACKET_t", "_RBRACKET_t", - "ACTIONS_t", "BIND_t", "CASE_t", "CLASS_t", "DEFAULT_t", "ELSE_t", - "EXISTING_t", "FOR_t", "IF_t", "IGNORE_t", "IN_t", "INCLUDE_t", - "LOCAL_t", "MODULE_t", "ON_t", "PIECEMEAL_t", "QUIETLY_t", "RETURN_t", - "RULE_t", "SWITCH_t", "TOGETHER_t", "UPDATED_t", "WHILE_t", "_LBRACE_t", - "_BAR_t", "_BARBAR_t", "_RBRACE_t", "ARG", "STRING", "$accept", "run", - "block", "rules", "null", "assign_list_opt", "arglist_opt", "local_opt", - "rule", "$@1", "$@2", "assign", "expr", "cases", "case", "lol", "list", - "listp", "arg", "$@3", "func", "eflags", "eflag", "bindlist", 0 + "ACTIONS_t", "BIND_t", "BREAK_t", "CASE_t", "CLASS_t", "CONTINUE_t", + "DEFAULT_t", "ELSE_t", "EXISTING_t", "FOR_t", "IF_t", "IGNORE_t", "IN_t", + "INCLUDE_t", "LOCAL_t", "MODULE_t", "ON_t", "PIECEMEAL_t", "QUIETLY_t", + "RETURN_t", "RULE_t", "SWITCH_t", "TOGETHER_t", "UPDATED_t", "WHILE_t", + "_LBRACE_t", "_BAR_t", "_BARBAR_t", "_RBRACE_t", "ARG", "STRING", + "$accept", "run", "block", "rules", "null", "assign_list_opt", + "arglist_opt", "local_opt", "rule", "$@1", "$@2", "assign", "expr", + "cases", "case", "lol", "list", "listp", "arg", "$@3", "func", "eflags", + "eflag", "bindlist", YY_NULL }; #endif @@ -606,21 +639,22 @@ static const yytype_uint16 yytoknum[] = 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, - 295, 296, 297, 298, 299, 300, 301, 302, 303 + 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, + 305 }; # endif /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. 
*/ static const yytype_uint8 yyr1[] = { - 0, 49, 50, 50, 51, 51, 52, 52, 52, 53, - 54, 54, 55, 55, 56, 56, 57, 57, 57, 57, - 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, - 57, 58, 59, 57, 60, 60, 60, 60, 61, 61, - 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, - 61, 61, 62, 62, 63, 64, 64, 65, 66, 66, - 67, 68, 67, 69, 69, 69, 70, 70, 71, 71, - 71, 71, 71, 71, 72, 72 + 0, 51, 52, 52, 53, 53, 54, 54, 54, 55, + 56, 56, 57, 57, 58, 58, 59, 59, 59, 59, + 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, + 59, 59, 59, 60, 61, 59, 62, 62, 62, 62, + 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, + 63, 63, 63, 63, 64, 64, 65, 66, 66, 67, + 68, 68, 69, 70, 69, 71, 71, 71, 72, 72, + 73, 73, 73, 73, 73, 73, 74, 74 }; /* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */ @@ -628,159 +662,165 @@ static const yytype_uint8 yyr2[] = { 0, 2, 0, 1, 1, 1, 1, 2, 5, 0, 2, 1, 3, 0, 1, 0, 3, 3, 3, 4, - 6, 3, 8, 5, 5, 5, 5, 5, 7, 5, - 3, 0, 0, 9, 1, 1, 1, 2, 1, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 2, 3, 0, 2, 4, 1, 3, 1, 0, 2, - 1, 0, 4, 2, 4, 4, 0, 2, 1, 1, - 1, 1, 1, 1, 0, 2 + 6, 3, 2, 2, 8, 5, 5, 5, 5, 5, + 7, 5, 3, 0, 0, 9, 1, 1, 1, 2, + 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 2, 3, 0, 2, 4, 1, 3, 1, + 0, 2, 1, 0, 4, 2, 4, 4, 0, 2, + 1, 1, 1, 1, 1, 1, 0, 2 }; -/* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state - STATE-NUM when YYTABLE doesn't specify something else to do. Zero +/* YYDEFACT[STATE-NAME] -- Default reduction number in state STATE-NUM. + Performed when YYTABLE doesn't specify something else to do. Zero means the default is an error. */ static const yytype_uint8 yydefact[] = { - 2, 61, 66, 58, 15, 0, 58, 58, 58, 0, - 58, 58, 0, 9, 60, 0, 3, 0, 6, 0, - 0, 0, 0, 55, 57, 14, 0, 0, 0, 60, - 0, 38, 0, 9, 0, 15, 0, 0, 0, 0, - 5, 4, 0, 1, 0, 7, 35, 34, 36, 0, - 58, 58, 0, 58, 0, 73, 70, 72, 71, 69, - 68, 74, 67, 9, 58, 59, 0, 50, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, - 58, 17, 58, 11, 0, 9, 30, 21, 52, 9, - 16, 18, 13, 37, 0, 0, 0, 63, 62, 58, - 0, 0, 56, 58, 51, 40, 45, 46, 41, 42, - 39, 43, 44, 0, 47, 48, 49, 10, 9, 0, - 0, 0, 52, 0, 58, 15, 58, 19, 58, 58, - 75, 31, 26, 0, 24, 8, 25, 0, 23, 53, - 27, 0, 29, 0, 65, 64, 0, 9, 15, 9, - 12, 20, 32, 0, 28, 54, 0, 22, 33 + 2, 63, 68, 0, 60, 0, 15, 0, 60, 60, + 60, 0, 60, 60, 0, 9, 62, 0, 3, 0, + 6, 0, 0, 0, 22, 0, 57, 59, 23, 14, + 0, 0, 0, 62, 0, 40, 0, 9, 0, 15, + 0, 0, 0, 0, 5, 4, 0, 1, 0, 7, + 37, 36, 38, 0, 60, 60, 0, 60, 0, 75, + 72, 74, 73, 71, 70, 76, 69, 9, 60, 61, + 0, 52, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 9, 0, 0, 60, 17, 60, 11, 0, 9, + 32, 21, 54, 9, 16, 18, 13, 39, 0, 0, + 0, 65, 64, 60, 0, 0, 58, 60, 53, 42, + 47, 48, 43, 44, 41, 45, 46, 0, 49, 50, + 51, 10, 9, 0, 0, 0, 54, 0, 60, 15, + 60, 19, 60, 60, 77, 33, 28, 0, 26, 8, + 27, 0, 25, 55, 29, 0, 31, 0, 67, 66, + 0, 9, 15, 9, 12, 20, 34, 0, 30, 56, + 0, 24, 35 }; /* YYDEFGOTO[NTERM-NUM]. */ static const yytype_int16 yydefgoto[] = { - -1, 15, 39, 40, 41, 84, 125, 17, 18, 146, - 156, 51, 30, 121, 122, 22, 23, 24, 31, 20, - 54, 21, 62, 100 + -1, 17, 43, 44, 45, 88, 129, 19, 20, 150, + 160, 55, 34, 125, 126, 25, 26, 27, 35, 22, + 58, 23, 66, 104 }; /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing STATE-NUM. 
*/ -#define YYPACT_NINF -48 +#define YYPACT_NINF -50 static const yytype_int16 yypact[] = { - 170, -48, -48, -48, -12, 7, -48, -17, -48, -3, - -48, -48, 7, 170, 1, 22, -48, -9, 170, 19, - -2, 79, -6, 29, -3, -48, 2, 7, 7, -48, - 138, 20, 44, 45, 18, 196, 51, 26, 151, 24, - -48, -48, 62, -48, 27, -48, -48, -48, -48, 61, - -48, -48, -3, -48, 67, -48, -48, -48, -48, -48, - -48, 58, -48, 170, -48, -48, 50, -48, 52, 7, - 7, 7, 7, 7, 7, 7, 7, 170, 7, 7, - -48, -48, -48, -48, 70, 170, -48, -48, 87, 170, - -48, -48, 94, -48, 17, 99, -20, -48, -48, -48, - 69, 71, -48, -48, -48, 91, 156, 156, -48, -48, - 91, -48, -48, 77, 78, 78, -48, -48, 170, 81, - 66, 82, 87, 95, -48, 196, -48, -48, -48, -48, - -48, -48, -48, 97, 112, -48, -48, 135, -48, -48, - -48, 150, -48, 148, -48, -48, 98, 170, 196, 170, - -48, -48, -48, 115, -48, -48, 116, -48, -48 + 161, -50, -50, -1, -50, 6, -25, 5, -50, -5, + -50, -11, -50, -50, 5, 161, 11, 28, -50, -4, + 161, 61, 9, -12, -50, 14, 51, -11, -50, -50, + 27, 5, 5, -50, 37, 53, 69, 72, 43, 189, + 79, 46, 128, 44, -50, -50, 84, -50, 49, -50, + -50, -50, -50, 85, -50, -50, -11, -50, 82, -50, + -50, -50, -50, -50, -50, 92, -50, 161, -50, -50, + 71, -50, 142, 5, 5, 5, 5, 5, 5, 5, + 5, 161, 5, 5, -50, -50, -50, -50, 105, 161, + -50, -50, 95, 161, -50, -50, 112, -50, 48, 109, + -15, -50, -50, -50, 76, 80, -50, -50, -50, 56, + 123, 123, -50, -50, 56, -50, -50, 97, 155, 155, + -50, -50, 161, 101, 75, 114, 95, 115, -50, 189, + -50, -50, -50, -50, -50, -50, -50, 119, 103, -50, + -50, 156, -50, -50, -50, 157, -50, 165, -50, -50, + 122, 161, 189, 161, -50, -50, -50, 129, -50, -50, + 130, -50, -50 }; /* YYPGOTO[NTERM-NUM]. */ static const yytype_int16 yypgoto[] = { - -48, -48, -47, 5, 140, -48, -48, 171, -27, -48, - -48, 80, 60, 54, -48, -13, -4, -48, 0, -48, - -48, -48, -48, -48 + -50, -50, -49, 13, 143, -50, -50, 176, -35, -50, + -50, 86, 32, 66, -50, -2, -7, -50, 0, -50, + -50, -50, -50, -50 }; /* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If positive, shift that token. If negative, reduce the rule which - number is the opposite. If zero, do what YYDEFACT says. - If YYTABLE_NINF, syntax error. */ -#define YYTABLE_NINF -59 + number is the opposite. If YYTABLE_NINF, syntax error. 
*/ +#define YYTABLE_NINF -61 static const yytype_int16 yytable[] = { - 19, 42, 32, 33, 34, 16, 36, 37, 86, 35, - 27, -58, -58, 19, 28, 1, 101, 128, 19, -58, - 25, -14, 43, 45, 65, 1, 46, 129, 46, 44, - 113, 47, 52, 47, 48, 19, 48, 63, 119, 64, - 97, 49, 123, 49, 29, 53, 94, 95, -58, 66, - 80, 102, 96, 50, 29, 81, 69, 70, 71, 82, - 104, 85, 87, 19, 72, 73, 74, 75, 76, 88, - 90, 135, 38, 91, 92, 93, 116, 19, 117, 99, - 103, 118, 69, 70, 71, 19, 98, 67, 68, 19, - 72, 73, 74, 75, 76, 130, 78, 79, 142, 133, - 153, 124, 155, 72, 73, 55, 75, 76, 56, 120, - 127, 141, 131, 137, 57, 58, 145, 132, 19, 59, - 60, 154, 143, 134, 144, 19, 61, 136, 138, 105, - 106, 107, 108, 109, 110, 111, 112, 148, 114, 115, - 147, 140, 69, 70, 71, 149, 152, 19, 19, 19, - 72, 73, 74, 75, 76, 69, 70, 71, 150, 151, - 69, 157, 158, 72, 73, 74, 75, 76, 72, 73, - 74, 75, 76, 83, 126, 26, 139, 0, 0, 0, - 0, 77, 78, 79, 0, 0, 0, 0, 1, 0, - 2, 0, 0, 3, 89, 78, 79, 4, 5, 0, - 0, 6, 7, 8, 9, 0, 0, 10, -15, 11, - 0, 0, 12, 13, 1, 0, 2, 14, 0, 3, - 0, 0, 0, 4, 5, 0, 0, 6, 25, 8, - 9, 0, 0, 10, 0, 11, 0, 0, 12, 13, - 0, 0, 0, 14 + 21, 36, 37, 38, 90, 40, 41, 1, 31, 29, + 24, 39, 32, 18, 46, 21, 59, 28, 105, 60, + 21, -60, -60, 1, 132, 61, 62, 69, 47, -60, + 63, 64, 117, 49, 133, -14, 48, 65, 33, 21, + 123, 73, 74, 75, 127, 56, 42, 98, 99, 76, + 77, 78, 79, 80, 33, 101, 100, 50, 57, 67, + -60, 68, 51, 71, 72, 52, 106, 21, 76, 77, + 50, 79, 80, 139, 53, 51, 70, 120, 52, 121, + 85, 21, 81, 82, 83, 84, 86, 53, 89, 21, + 91, 92, 94, 21, 146, 95, 134, 54, 96, 97, + 137, 102, 157, 107, 159, 109, 110, 111, 112, 113, + 114, 115, 116, 103, 118, 119, 122, 158, 124, 128, + 131, 135, 21, 147, 141, 148, 145, 73, 136, 21, + 152, 149, 73, 74, 75, 76, 77, 78, 79, 80, + 76, 77, 78, 79, 80, 138, 73, 74, 75, 140, + 108, 21, 21, 21, 76, 77, 78, 79, 80, 73, + 74, 75, 142, 144, 151, 154, 153, 76, 77, 78, + 79, 80, 156, 93, 82, 83, 155, 161, 162, 1, + 87, 2, 30, 3, 130, 4, 5, 0, 82, 83, + 6, 7, 143, 0, 8, 9, 10, 11, 0, 0, + 12, -15, 13, 0, 0, 14, 15, 1, 0, 2, + 16, 3, 0, 4, 5, 0, 0, 0, 6, 7, + 0, 0, 8, 29, 10, 11, 0, 0, 12, 0, + 13, 0, 0, 14, 15, 0, 0, 0, 16 }; +#define yypact_value_is_default(Yystate) \ + (!!((Yystate) == (-50))) + +#define yytable_value_is_error(Yytable_value) \ + YYID (0) + static const yytype_int16 yycheck[] = { - 0, 14, 6, 7, 8, 0, 10, 11, 35, 9, - 3, 10, 11, 13, 7, 18, 63, 37, 18, 18, - 32, 38, 0, 18, 24, 18, 9, 47, 9, 38, - 77, 14, 34, 14, 17, 35, 17, 43, 85, 10, - 53, 24, 89, 24, 47, 47, 50, 51, 47, 47, - 30, 64, 52, 34, 47, 11, 4, 5, 6, 14, - 8, 43, 11, 63, 12, 13, 14, 15, 16, 43, - 46, 118, 12, 11, 47, 14, 80, 77, 82, 21, - 30, 11, 4, 5, 6, 85, 19, 27, 28, 89, - 12, 13, 14, 15, 16, 99, 44, 45, 125, 103, - 147, 7, 149, 12, 13, 26, 15, 16, 29, 22, - 11, 124, 43, 47, 35, 36, 129, 46, 118, 40, - 41, 148, 126, 46, 128, 125, 47, 46, 46, 69, - 70, 71, 72, 73, 74, 75, 76, 25, 78, 79, - 43, 46, 4, 5, 6, 10, 48, 147, 148, 149, - 12, 13, 14, 15, 16, 4, 5, 6, 8, 11, - 4, 46, 46, 12, 13, 14, 15, 16, 12, 13, - 14, 15, 16, 33, 94, 4, 122, -1, -1, -1, - -1, 43, 44, 45, -1, -1, -1, -1, 18, -1, - 20, -1, -1, 23, 43, 44, 45, 27, 28, -1, - -1, 31, 32, 33, 34, -1, -1, 37, 38, 39, - -1, -1, 42, 43, 18, -1, 20, 47, -1, 23, - -1, -1, -1, 27, 28, -1, -1, 31, 32, 33, - 34, -1, -1, 37, -1, 39, -1, -1, 42, 43, - -1, -1, -1, 47 + 0, 8, 9, 10, 39, 12, 13, 18, 3, 34, + 11, 11, 7, 0, 16, 15, 28, 11, 67, 31, + 20, 10, 11, 18, 39, 37, 38, 27, 0, 18, + 42, 43, 81, 20, 49, 40, 40, 49, 49, 39, + 89, 4, 5, 6, 93, 36, 14, 54, 55, 12, + 13, 14, 15, 16, 49, 57, 56, 9, 
49, 45, + 49, 10, 14, 31, 32, 17, 68, 67, 12, 13, + 9, 15, 16, 122, 26, 14, 49, 84, 17, 86, + 11, 81, 45, 46, 47, 32, 14, 26, 45, 89, + 11, 45, 48, 93, 129, 11, 103, 36, 49, 14, + 107, 19, 151, 32, 153, 73, 74, 75, 76, 77, + 78, 79, 80, 21, 82, 83, 11, 152, 23, 7, + 11, 45, 122, 130, 49, 132, 128, 4, 48, 129, + 27, 133, 4, 5, 6, 12, 13, 14, 15, 16, + 12, 13, 14, 15, 16, 48, 4, 5, 6, 48, + 8, 151, 152, 153, 12, 13, 14, 15, 16, 4, + 5, 6, 48, 48, 45, 8, 10, 12, 13, 14, + 15, 16, 50, 45, 46, 47, 11, 48, 48, 18, + 37, 20, 6, 22, 98, 24, 25, -1, 46, 47, + 29, 30, 126, -1, 33, 34, 35, 36, -1, -1, + 39, 40, 41, -1, -1, 44, 45, 18, -1, 20, + 49, 22, -1, 24, 25, -1, -1, -1, 29, 30, + -1, -1, 33, 34, 35, 36, -1, -1, 39, -1, + 41, -1, -1, 44, 45, -1, -1, -1, 49 }; /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing symbol of state STATE-NUM. */ static const yytype_uint8 yystos[] = { - 0, 18, 20, 23, 27, 28, 31, 32, 33, 34, - 37, 39, 42, 43, 47, 50, 52, 56, 57, 67, - 68, 70, 64, 65, 66, 32, 56, 3, 7, 47, - 61, 67, 65, 65, 65, 67, 65, 65, 61, 51, - 52, 53, 64, 0, 38, 52, 9, 14, 17, 24, - 34, 60, 34, 47, 69, 26, 29, 35, 36, 40, - 41, 47, 71, 43, 10, 67, 47, 61, 61, 4, - 5, 6, 12, 13, 14, 15, 16, 43, 44, 45, - 30, 11, 14, 53, 54, 43, 57, 11, 43, 43, - 46, 11, 47, 14, 65, 65, 67, 64, 19, 21, - 72, 51, 64, 30, 8, 61, 61, 61, 61, 61, - 61, 61, 61, 51, 61, 61, 65, 65, 11, 51, - 22, 62, 63, 51, 7, 55, 60, 11, 37, 47, - 65, 43, 46, 65, 46, 51, 46, 47, 46, 62, - 46, 64, 57, 65, 65, 64, 58, 43, 25, 10, - 8, 11, 48, 51, 57, 51, 59, 46, 46 + 0, 18, 20, 22, 24, 25, 29, 30, 33, 34, + 35, 36, 39, 41, 44, 45, 49, 52, 54, 58, + 59, 69, 70, 72, 11, 66, 67, 68, 11, 34, + 58, 3, 7, 49, 63, 69, 67, 67, 67, 69, + 67, 67, 63, 53, 54, 55, 66, 0, 40, 54, + 9, 14, 17, 26, 36, 62, 36, 49, 71, 28, + 31, 37, 38, 42, 43, 49, 73, 45, 10, 69, + 49, 63, 63, 4, 5, 6, 12, 13, 14, 15, + 16, 45, 46, 47, 32, 11, 14, 55, 56, 45, + 59, 11, 45, 45, 48, 11, 49, 14, 67, 67, + 69, 66, 19, 21, 74, 53, 66, 32, 8, 63, + 63, 63, 63, 63, 63, 63, 63, 53, 63, 63, + 67, 67, 11, 53, 23, 64, 65, 53, 7, 57, + 62, 11, 39, 49, 67, 45, 48, 67, 48, 53, + 48, 49, 48, 64, 48, 66, 59, 67, 67, 66, + 60, 45, 27, 10, 8, 11, 50, 53, 59, 53, + 61, 48, 48 }; #define yyerrok (yyerrstatus = 0) @@ -810,18 +850,18 @@ static const yytype_uint8 yystos[] = #define YYRECOVERING() (!!yyerrstatus) -#define YYBACKUP(Token, Value) \ -do \ - if (yychar == YYEMPTY && yylen == 1) \ - { \ - yychar = (Token); \ - yylval = (Value); \ - yytoken = YYTRANSLATE (yychar); \ - YYPOPSTACK (1); \ - goto yybackup; \ - } \ - else \ - { \ +#define YYBACKUP(Token, Value) \ +do \ + if (yychar == YYEMPTY) \ + { \ + yychar = (Token); \ + yylval = (Value); \ + YYPOPSTACK (yylen); \ + yystate = *yyssp; \ + goto yybackup; \ + } \ + else \ + { \ yyerror (YY_("syntax error: cannot back up")); \ YYERROR; \ } \ @@ -831,46 +871,38 @@ while (YYID (0)) #define YYTERROR 1 #define YYERRCODE 256 - /* YYLLOC_DEFAULT -- Set CURRENT to span from RHS[1] to RHS[N]. If N is 0, then set CURRENT to the empty location which ends the previous symbol: RHS[0] (always defined). 
*/ -#define YYRHSLOC(Rhs, K) ((Rhs)[K]) #ifndef YYLLOC_DEFAULT -# define YYLLOC_DEFAULT(Current, Rhs, N) \ - do \ - if (YYID (N)) \ - { \ - (Current).first_line = YYRHSLOC (Rhs, 1).first_line; \ - (Current).first_column = YYRHSLOC (Rhs, 1).first_column; \ - (Current).last_line = YYRHSLOC (Rhs, N).last_line; \ - (Current).last_column = YYRHSLOC (Rhs, N).last_column; \ - } \ - else \ - { \ - (Current).first_line = (Current).last_line = \ - YYRHSLOC (Rhs, 0).last_line; \ - (Current).first_column = (Current).last_column = \ - YYRHSLOC (Rhs, 0).last_column; \ - } \ +# define YYLLOC_DEFAULT(Current, Rhs, N) \ + do \ + if (YYID (N)) \ + { \ + (Current).first_line = YYRHSLOC (Rhs, 1).first_line; \ + (Current).first_column = YYRHSLOC (Rhs, 1).first_column; \ + (Current).last_line = YYRHSLOC (Rhs, N).last_line; \ + (Current).last_column = YYRHSLOC (Rhs, N).last_column; \ + } \ + else \ + { \ + (Current).first_line = (Current).last_line = \ + YYRHSLOC (Rhs, 0).last_line; \ + (Current).first_column = (Current).last_column = \ + YYRHSLOC (Rhs, 0).last_column; \ + } \ while (YYID (0)) #endif +#define YYRHSLOC(Rhs, K) ((Rhs)[K]) -/* YY_LOCATION_PRINT -- Print the location on the stream. - This macro was not mandated originally: define only if we know - we won't break user code: when these are the locations we know. */ + + +/* This macro is provided for backward compatibility. */ #ifndef YY_LOCATION_PRINT -# if defined YYLTYPE_IS_TRIVIAL && YYLTYPE_IS_TRIVIAL -# define YY_LOCATION_PRINT(File, Loc) \ - fprintf (File, "%d.%d-%d.%d", \ - (Loc).first_line, (Loc).first_column, \ - (Loc).last_line, (Loc).last_column) -# else -# define YY_LOCATION_PRINT(File, Loc) ((void) 0) -# endif +# define YY_LOCATION_PRINT(File, Loc) ((void) 0) #endif @@ -925,6 +957,8 @@ yy_symbol_value_print (yyoutput, yytype, yyvaluep) YYSTYPE const * const yyvaluep; #endif { + FILE *yyo = yyoutput; + YYUSE (yyo); if (!yyvaluep) return; # ifdef YYPRINT @@ -1062,7 +1096,6 @@ int yydebug; # define YYMAXDEPTH 10000 #endif - #if YYERROR_VERBOSE @@ -1165,115 +1198,142 @@ yytnamerr (char *yyres, const char *yystr) } # endif -/* Copy into YYRESULT an error message about the unexpected token - YYCHAR while in state YYSTATE. Return the number of bytes copied, - including the terminating null byte. If YYRESULT is null, do not - copy anything; just return the number of bytes that would be - copied. As a special case, return 0 if an ordinary "syntax error" - message will do. Return YYSIZE_MAXIMUM if overflow occurs during - size calculation. */ -static YYSIZE_T -yysyntax_error (char *yyresult, int yystate, int yychar) +/* Copy into *YYMSG, which is of size *YYMSG_ALLOC, an error message + about the unexpected token YYTOKEN for the state stack whose top is + YYSSP. + + Return 0 if *YYMSG was successfully written. Return 1 if *YYMSG is + not large enough to hold the message. In that case, also set + *YYMSG_ALLOC to the required number of bytes. Return 2 if the + required number of bytes is too large to store. */ +static int +yysyntax_error (YYSIZE_T *yymsg_alloc, char **yymsg, + yytype_int16 *yyssp, int yytoken) { - int yyn = yypact[yystate]; + YYSIZE_T yysize0 = yytnamerr (YY_NULL, yytname[yytoken]); + YYSIZE_T yysize = yysize0; + YYSIZE_T yysize1; + enum { YYERROR_VERBOSE_ARGS_MAXIMUM = 5 }; + /* Internationalized format string. */ + const char *yyformat = YY_NULL; + /* Arguments of yyformat. */ + char const *yyarg[YYERROR_VERBOSE_ARGS_MAXIMUM]; + /* Number of reported tokens (one for the "unexpected", one per + "expected"). 
*/ + int yycount = 0; - if (! (YYPACT_NINF < yyn && yyn <= YYLAST)) - return 0; - else + /* There are many possibilities here to consider: + - Assume YYFAIL is not used. It's too flawed to consider. See + + for details. YYERROR is fine as it does not invoke this + function. + - If this state is a consistent state with a default action, then + the only way this function was invoked is if the default action + is an error action. In that case, don't check for expected + tokens because there are none. + - The only way there can be no lookahead present (in yychar) is if + this state is a consistent state with a default action. Thus, + detecting the absence of a lookahead is sufficient to determine + that there is no unexpected or expected token to report. In that + case, just report a simple "syntax error". + - Don't assume there isn't a lookahead just because this state is a + consistent state with a default action. There might have been a + previous inconsistent state, consistent state with a non-default + action, or user semantic action that manipulated yychar. + - Of course, the expected token list depends on states to have + correct lookahead information, and it depends on the parser not + to perform extra reductions after fetching a lookahead from the + scanner and before detecting a syntax error. Thus, state merging + (from LALR or IELR) and default reductions corrupt the expected + token list. However, the list is correct for canonical LR with + one exception: it will still contain any token that will not be + accepted due to an error action in a later state. + */ + if (yytoken != YYEMPTY) { - int yytype = YYTRANSLATE (yychar); - YYSIZE_T yysize0 = yytnamerr (0, yytname[yytype]); - YYSIZE_T yysize = yysize0; - YYSIZE_T yysize1; - int yysize_overflow = 0; - enum { YYERROR_VERBOSE_ARGS_MAXIMUM = 5 }; - char const *yyarg[YYERROR_VERBOSE_ARGS_MAXIMUM]; - int yyx; + int yyn = yypact[*yyssp]; + yyarg[yycount++] = yytname[yytoken]; + if (!yypact_value_is_default (yyn)) + { + /* Start YYX at -YYN if negative to avoid negative indexes in + YYCHECK. In other words, skip the first -YYN actions for + this state because they are default actions. */ + int yyxbegin = yyn < 0 ? -yyn : 0; + /* Stay within bounds of both yycheck and yytname. */ + int yychecklim = YYLAST - yyn + 1; + int yyxend = yychecklim < YYNTOKENS ? yychecklim : YYNTOKENS; + int yyx; -# if 0 - /* This is so xgettext sees the translatable formats that are - constructed on the fly. */ - YY_("syntax error, unexpected %s"); - YY_("syntax error, unexpected %s, expecting %s"); - YY_("syntax error, unexpected %s, expecting %s or %s"); - YY_("syntax error, unexpected %s, expecting %s or %s or %s"); - YY_("syntax error, unexpected %s, expecting %s or %s or %s or %s"); -# endif - char *yyfmt; - char const *yyf; - static char const yyunexpected[] = "syntax error, unexpected %s"; - static char const yyexpecting[] = ", expecting %s"; - static char const yyor[] = " or %s"; - char yyformat[sizeof yyunexpected - + sizeof yyexpecting - 1 - + ((YYERROR_VERBOSE_ARGS_MAXIMUM - 2) - * (sizeof yyor - 1))]; - char const *yyprefix = yyexpecting; - - /* Start YYX at -YYN if negative to avoid negative indexes in - YYCHECK. */ - int yyxbegin = yyn < 0 ? -yyn : 0; - - /* Stay within bounds of both yycheck and yytname. */ - int yychecklim = YYLAST - yyn + 1; - int yyxend = yychecklim < YYNTOKENS ? 
yychecklim : YYNTOKENS; - int yycount = 1; - - yyarg[0] = yytname[yytype]; - yyfmt = yystpcpy (yyformat, yyunexpected); - - for (yyx = yyxbegin; yyx < yyxend; ++yyx) - if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR) - { - if (yycount == YYERROR_VERBOSE_ARGS_MAXIMUM) - { - yycount = 1; - yysize = yysize0; - yyformat[sizeof yyunexpected - 1] = '\0'; - break; - } - yyarg[yycount++] = yytname[yyx]; - yysize1 = yysize + yytnamerr (0, yytname[yyx]); - yysize_overflow |= (yysize1 < yysize); - yysize = yysize1; - yyfmt = yystpcpy (yyfmt, yyprefix); - yyprefix = yyor; - } - - yyf = YY_(yyformat); - yysize1 = yysize + yystrlen (yyf); - yysize_overflow |= (yysize1 < yysize); - yysize = yysize1; - - if (yysize_overflow) - return YYSIZE_MAXIMUM; - - if (yyresult) - { - /* Avoid sprintf, as that infringes on the user's name space. - Don't have undefined behavior even if the translation - produced a string with the wrong number of "%s"s. */ - char *yyp = yyresult; - int yyi = 0; - while ((*yyp = *yyf) != '\0') - { - if (*yyp == '%' && yyf[1] == 's' && yyi < yycount) - { - yyp += yytnamerr (yyp, yyarg[yyi++]); - yyf += 2; - } - else - { - yyp++; - yyf++; - } - } - } - return yysize; + for (yyx = yyxbegin; yyx < yyxend; ++yyx) + if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR + && !yytable_value_is_error (yytable[yyx + yyn])) + { + if (yycount == YYERROR_VERBOSE_ARGS_MAXIMUM) + { + yycount = 1; + yysize = yysize0; + break; + } + yyarg[yycount++] = yytname[yyx]; + yysize1 = yysize + yytnamerr (YY_NULL, yytname[yyx]); + if (! (yysize <= yysize1 + && yysize1 <= YYSTACK_ALLOC_MAXIMUM)) + return 2; + yysize = yysize1; + } + } } + + switch (yycount) + { +# define YYCASE_(N, S) \ + case N: \ + yyformat = S; \ + break + YYCASE_(0, YY_("syntax error")); + YYCASE_(1, YY_("syntax error, unexpected %s")); + YYCASE_(2, YY_("syntax error, unexpected %s, expecting %s")); + YYCASE_(3, YY_("syntax error, unexpected %s, expecting %s or %s")); + YYCASE_(4, YY_("syntax error, unexpected %s, expecting %s or %s or %s")); + YYCASE_(5, YY_("syntax error, unexpected %s, expecting %s or %s or %s or %s")); +# undef YYCASE_ + } + + yysize1 = yysize + yystrlen (yyformat); + if (! (yysize <= yysize1 && yysize1 <= YYSTACK_ALLOC_MAXIMUM)) + return 2; + yysize = yysize1; + + if (*yymsg_alloc < yysize) + { + *yymsg_alloc = 2 * yysize; + if (! (yysize <= *yymsg_alloc + && *yymsg_alloc <= YYSTACK_ALLOC_MAXIMUM)) + *yymsg_alloc = YYSTACK_ALLOC_MAXIMUM; + return 1; + } + + /* Avoid sprintf, as that infringes on the user's name space. + Don't have undefined behavior even if the translation + produced a string with the wrong number of "%s"s. */ + { + char *yyp = *yymsg; + int yyi = 0; + while ((*yyp = *yyformat) != '\0') + if (*yyp == '%' && yyformat[1] == 's' && yyi < yycount) + { + yyp += yytnamerr (yyp, yyarg[yyi++]); + yyformat += 2; + } + else + { + yyp++; + yyformat++; + } + } + return 0; } #endif /* YYERROR_VERBOSE */ - /*-----------------------------------------------. | Release the memory associated to this symbol. | @@ -1306,25 +1366,21 @@ yydestruct (yymsg, yytype, yyvaluep) } } -/* Prevent warnings from -Wmissing-prototypes. */ -#ifdef YYPARSE_PARAM -#if defined __STDC__ || defined __cplusplus -int yyparse (void *YYPARSE_PARAM); -#else -int yyparse (); -#endif -#else /* ! YYPARSE_PARAM */ -#if defined __STDC__ || defined __cplusplus -int yyparse (void); -#else -int yyparse (); -#endif -#endif /* ! YYPARSE_PARAM */ + /* The lookahead symbol. 
*/ int yychar; + +#ifndef YYLVAL_INITIALIZE +# define YYLVAL_INITIALIZE() +#endif +#ifndef YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN +# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN +# define YY_IGNORE_MAYBE_UNINITIALIZED_END +#endif + /* The semantic value of the lookahead symbol. */ YYSTYPE yylval; @@ -1332,10 +1388,9 @@ YYSTYPE yylval; int yynerrs; - -/*-------------------------. -| yyparse or yypush_parse. | -`-------------------------*/ +/*----------. +| yyparse. | +`----------*/ #ifdef YYPARSE_PARAM #if (defined __STDC__ || defined __C99__FUNC__ \ @@ -1359,8 +1414,6 @@ yyparse () #endif #endif { - - int yystate; /* Number of tokens to shift before error messages enabled. */ int yyerrstatus; @@ -1369,7 +1422,7 @@ yyparse () `yyss': related to states. `yyvs': related to semantic values. - Refer to the stacks thru separate pointers, to allow yyoverflow + Refer to the stacks through separate pointers, to allow yyoverflow to reallocate them elsewhere. */ /* The state stack. */ @@ -1387,7 +1440,7 @@ yyparse () int yyn; int yyresult; /* Lookahead token as an internal (translated) token number. */ - int yytoken; + int yytoken = 0; /* The variables used to return semantic value and location from the action routines. */ YYSTYPE yyval; @@ -1405,7 +1458,6 @@ yyparse () Keep to zero when no symbol should be popped. */ int yylen = 0; - yytoken = 0; yyss = yyssa; yyvs = yyvsa; yystacksize = YYINITDEPTH; @@ -1424,6 +1476,7 @@ yyparse () yyssp = yyss; yyvsp = yyvs; + YYLVAL_INITIALIZE (); goto yysetstate; /*------------------------------------------------------------. @@ -1515,7 +1568,7 @@ yybackup: /* First try to decide what to do without reference to lookahead token. */ yyn = yypact[yystate]; - if (yyn == YYPACT_NINF) + if (yypact_value_is_default (yyn)) goto yydefault; /* Not known => get a lookahead token if don't already have one. 
*/ @@ -1546,8 +1599,8 @@ yybackup: yyn = yytable[yyn]; if (yyn <= 0) { - if (yyn == 0 || yyn == YYTABLE_NINF) - goto yyerrlab; + if (yytable_value_is_error (yyn)) + goto yyerrlab; yyn = -yyn; goto yyreduce; } @@ -1564,7 +1617,9 @@ yybackup: yychar = YYEMPTY; yystate = yyn; + YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN *++yyvsp = yylval; + YY_IGNORE_MAYBE_UNINITIALIZED_END goto yynewstate; @@ -1601,522 +1656,471 @@ yyreduce: switch (yyn) { case 3: - -/* Line 1464 of yacc.c */ -#line 142 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 147 "jamgram.y" { parse_save( (yyvsp[(1) - (1)]).parse ); } break; case 4: - -/* Line 1464 of yacc.c */ -#line 153 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 158 "jamgram.y" { (yyval).parse = (yyvsp[(1) - (1)]).parse; } break; case 5: - -/* Line 1464 of yacc.c */ -#line 155 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 160 "jamgram.y" { (yyval).parse = (yyvsp[(1) - (1)]).parse; } break; case 6: - -/* Line 1464 of yacc.c */ -#line 159 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 164 "jamgram.y" { (yyval).parse = (yyvsp[(1) - (1)]).parse; } break; case 7: - -/* Line 1464 of yacc.c */ -#line 161 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 166 "jamgram.y" { (yyval).parse = prules( (yyvsp[(1) - (2)]).parse, (yyvsp[(2) - (2)]).parse ); } break; case 8: - -/* Line 1464 of yacc.c */ -#line 163 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 168 "jamgram.y" { (yyval).parse = plocal( (yyvsp[(2) - (5)]).parse, (yyvsp[(3) - (5)]).parse, (yyvsp[(5) - (5)]).parse ); } break; case 9: - -/* Line 1464 of yacc.c */ -#line 167 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 172 "jamgram.y" { (yyval).parse = pnull(); } break; case 10: - -/* Line 1464 of yacc.c */ -#line 171 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 176 "jamgram.y" { (yyval).parse = (yyvsp[(2) - (2)]).parse; (yyval).number = ASSIGN_SET; } break; case 11: - -/* Line 1464 of yacc.c */ -#line 173 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 178 "jamgram.y" { (yyval).parse = (yyvsp[(1) - (1)]).parse; (yyval).number = ASSIGN_APPEND; } break; case 12: - -/* Line 1464 of yacc.c */ -#line 177 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 182 "jamgram.y" { (yyval).parse = (yyvsp[(2) - (3)]).parse; } break; case 13: - -/* Line 1464 of yacc.c */ -#line 179 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 184 "jamgram.y" { (yyval).parse = P0; } break; case 14: - -/* Line 1464 of yacc.c */ -#line 183 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 188 "jamgram.y" { (yyval).number = 1; } break; case 15: - -/* Line 1464 of yacc.c */ -#line 185 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 190 "jamgram.y" { (yyval).number = 0; } break; case 16: - -/* Line 1464 of yacc.c */ -#line 189 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 194 "jamgram.y" { (yyval).parse = (yyvsp[(2) - (3)]).parse; } break; case 17: - -/* Line 1464 of yacc.c */ -#line 191 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 196 "jamgram.y" { (yyval).parse = pincl( (yyvsp[(2) - (3)]).parse ); } break; case 18: - -/* Line 1464 of yacc.c */ -#line 193 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 198 "jamgram.y" { (yyval).parse = prule( (yyvsp[(1) - (3)]).string, (yyvsp[(2) - (3)]).parse ); } break; case 19: - -/* Line 1464 of yacc.c */ -#line 195 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 200 "jamgram.y" { (yyval).parse = pset( (yyvsp[(1) - (4)]).parse, (yyvsp[(3) - (4)]).parse, (yyvsp[(2) - (4)]).number ); } break; case 20: - -/* Line 1464 of yacc.c */ -#line 197 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 202 "jamgram.y" { (yyval).parse = pset1( (yyvsp[(1) - 
(6)]).parse, (yyvsp[(3) - (6)]).parse, (yyvsp[(5) - (6)]).parse, (yyvsp[(4) - (6)]).number ); } break; case 21: - -/* Line 1464 of yacc.c */ -#line 199 "jamgram.y" - { (yyval).parse = (yyvsp[(2) - (3)]).parse; } +/* Line 1813 of yacc.c */ +#line 204 "jamgram.y" + { (yyval).parse = preturn( (yyvsp[(2) - (3)]).parse ); } break; case 22: - -/* Line 1464 of yacc.c */ -#line 201 "jamgram.y" - { (yyval).parse = pfor( (yyvsp[(3) - (8)]).string, (yyvsp[(5) - (8)]).parse, (yyvsp[(7) - (8)]).parse, (yyvsp[(2) - (8)]).number ); } +/* Line 1813 of yacc.c */ +#line 206 "jamgram.y" + { (yyval).parse = pbreak(); } break; case 23: - -/* Line 1464 of yacc.c */ -#line 203 "jamgram.y" - { (yyval).parse = pswitch( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse ); } +/* Line 1813 of yacc.c */ +#line 208 "jamgram.y" + { (yyval).parse = pcontinue(); } break; case 24: - -/* Line 1464 of yacc.c */ -#line 205 "jamgram.y" - { (yyval).parse = pif( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse, pnull() ); } +/* Line 1813 of yacc.c */ +#line 210 "jamgram.y" + { (yyval).parse = pfor( (yyvsp[(3) - (8)]).string, (yyvsp[(5) - (8)]).parse, (yyvsp[(7) - (8)]).parse, (yyvsp[(2) - (8)]).number ); } break; case 25: - -/* Line 1464 of yacc.c */ -#line 207 "jamgram.y" - { (yyval).parse = pmodule( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse ); } +/* Line 1813 of yacc.c */ +#line 212 "jamgram.y" + { (yyval).parse = pswitch( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse ); } break; case 26: - -/* Line 1464 of yacc.c */ -#line 209 "jamgram.y" - { (yyval).parse = pclass( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse ); } +/* Line 1813 of yacc.c */ +#line 214 "jamgram.y" + { (yyval).parse = pif( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse, pnull() ); } break; case 27: - -/* Line 1464 of yacc.c */ -#line 211 "jamgram.y" - { (yyval).parse = pwhile( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse ); } +/* Line 1813 of yacc.c */ +#line 216 "jamgram.y" + { (yyval).parse = pmodule( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse ); } break; case 28: - -/* Line 1464 of yacc.c */ -#line 213 "jamgram.y" - { (yyval).parse = pif( (yyvsp[(2) - (7)]).parse, (yyvsp[(4) - (7)]).parse, (yyvsp[(7) - (7)]).parse ); } +/* Line 1813 of yacc.c */ +#line 218 "jamgram.y" + { (yyval).parse = pclass( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse ); } break; case 29: - -/* Line 1464 of yacc.c */ -#line 215 "jamgram.y" - { (yyval).parse = psetc( (yyvsp[(3) - (5)]).string, (yyvsp[(5) - (5)]).parse, (yyvsp[(4) - (5)]).parse, (yyvsp[(1) - (5)]).number ); } +/* Line 1813 of yacc.c */ +#line 220 "jamgram.y" + { (yyval).parse = pwhile( (yyvsp[(2) - (5)]).parse, (yyvsp[(4) - (5)]).parse ); } break; case 30: - -/* Line 1464 of yacc.c */ -#line 217 "jamgram.y" - { (yyval).parse = pon( (yyvsp[(2) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } +/* Line 1813 of yacc.c */ +#line 222 "jamgram.y" + { (yyval).parse = pif( (yyvsp[(2) - (7)]).parse, (yyvsp[(4) - (7)]).parse, (yyvsp[(7) - (7)]).parse ); } break; case 31: - -/* Line 1464 of yacc.c */ -#line 219 "jamgram.y" - { yymode( SCAN_STRING ); } +/* Line 1813 of yacc.c */ +#line 224 "jamgram.y" + { (yyval).parse = psetc( (yyvsp[(3) - (5)]).string, (yyvsp[(5) - (5)]).parse, (yyvsp[(4) - (5)]).parse, (yyvsp[(1) - (5)]).number ); } break; case 32: - -/* Line 1464 of yacc.c */ -#line 221 "jamgram.y" - { yymode( SCAN_NORMAL ); } +/* Line 1813 of yacc.c */ +#line 226 "jamgram.y" + { (yyval).parse = pon( (yyvsp[(2) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } break; case 33: - -/* Line 1464 
of yacc.c */ -#line 223 "jamgram.y" - { (yyval).parse = psete( (yyvsp[(3) - (9)]).string,(yyvsp[(4) - (9)]).parse,(yyvsp[(7) - (9)]).string,(yyvsp[(2) - (9)]).number ); } +/* Line 1813 of yacc.c */ +#line 228 "jamgram.y" + { yymode( SCAN_STRING ); } break; case 34: - -/* Line 1464 of yacc.c */ -#line 231 "jamgram.y" - { (yyval).number = ASSIGN_SET; } - break; - - case 35: - -/* Line 1464 of yacc.c */ -#line 233 "jamgram.y" - { (yyval).number = ASSIGN_APPEND; } - break; - - case 36: - -/* Line 1464 of yacc.c */ -#line 235 "jamgram.y" - { (yyval).number = ASSIGN_DEFAULT; } - break; - - case 37: - -/* Line 1464 of yacc.c */ -#line 237 "jamgram.y" - { (yyval).number = ASSIGN_DEFAULT; } - break; - - case 38: - -/* Line 1464 of yacc.c */ -#line 244 "jamgram.y" - { (yyval).parse = peval( EXPR_EXISTS, (yyvsp[(1) - (1)]).parse, pnull() ); } - break; - - case 39: - -/* Line 1464 of yacc.c */ -#line 246 "jamgram.y" - { (yyval).parse = peval( EXPR_EQUALS, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 40: - -/* Line 1464 of yacc.c */ -#line 248 "jamgram.y" - { (yyval).parse = peval( EXPR_NOTEQ, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 41: - -/* Line 1464 of yacc.c */ -#line 250 "jamgram.y" - { (yyval).parse = peval( EXPR_LESS, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 42: - -/* Line 1464 of yacc.c */ -#line 252 "jamgram.y" - { (yyval).parse = peval( EXPR_LESSEQ, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 43: - -/* Line 1464 of yacc.c */ -#line 254 "jamgram.y" - { (yyval).parse = peval( EXPR_MORE, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 44: - -/* Line 1464 of yacc.c */ -#line 256 "jamgram.y" - { (yyval).parse = peval( EXPR_MOREEQ, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 45: - -/* Line 1464 of yacc.c */ -#line 258 "jamgram.y" - { (yyval).parse = peval( EXPR_AND, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 46: - -/* Line 1464 of yacc.c */ -#line 260 "jamgram.y" - { (yyval).parse = peval( EXPR_AND, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 47: - -/* Line 1464 of yacc.c */ -#line 262 "jamgram.y" - { (yyval).parse = peval( EXPR_OR, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 48: - -/* Line 1464 of yacc.c */ -#line 264 "jamgram.y" - { (yyval).parse = peval( EXPR_OR, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 49: - -/* Line 1464 of yacc.c */ -#line 266 "jamgram.y" - { (yyval).parse = peval( EXPR_IN, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } - break; - - case 50: - -/* Line 1464 of yacc.c */ -#line 268 "jamgram.y" - { (yyval).parse = peval( EXPR_NOT, (yyvsp[(2) - (2)]).parse, pnull() ); } - break; - - case 51: - -/* Line 1464 of yacc.c */ -#line 270 "jamgram.y" - { (yyval).parse = (yyvsp[(2) - (3)]).parse; } - break; - - case 52: - -/* Line 1464 of yacc.c */ -#line 281 "jamgram.y" - { (yyval).parse = P0; } - break; - - case 53: - -/* Line 1464 of yacc.c */ -#line 283 "jamgram.y" - { (yyval).parse = pnode( (yyvsp[(1) - (2)]).parse, (yyvsp[(2) - (2)]).parse ); } - break; - - case 54: - -/* Line 1464 of yacc.c */ -#line 287 "jamgram.y" - { (yyval).parse = psnode( (yyvsp[(2) - (4)]).string, (yyvsp[(4) - (4)]).parse ); } - break; - - case 55: - -/* Line 1464 of yacc.c */ -#line 296 "jamgram.y" - { (yyval).parse = pnode( P0, (yyvsp[(1) - (1)]).parse ); } - break; - - case 56: - -/* Line 1464 of 
yacc.c */ -#line 298 "jamgram.y" - { (yyval).parse = pnode( (yyvsp[(3) - (3)]).parse, (yyvsp[(1) - (3)]).parse ); } - break; - - case 57: - -/* Line 1464 of yacc.c */ -#line 308 "jamgram.y" - { (yyval).parse = (yyvsp[(1) - (1)]).parse; yymode( SCAN_NORMAL ); } - break; - - case 58: - -/* Line 1464 of yacc.c */ -#line 312 "jamgram.y" - { (yyval).parse = pnull(); yymode( SCAN_PUNCT ); } - break; - - case 59: - -/* Line 1464 of yacc.c */ -#line 314 "jamgram.y" - { (yyval).parse = pappend( (yyvsp[(1) - (2)]).parse, (yyvsp[(2) - (2)]).parse ); } - break; - - case 60: - -/* Line 1464 of yacc.c */ -#line 318 "jamgram.y" - { (yyval).parse = plist( (yyvsp[(1) - (1)]).string ); } - break; - - case 61: - -/* Line 1464 of yacc.c */ -#line 319 "jamgram.y" +/* Line 1813 of yacc.c */ +#line 230 "jamgram.y" { yymode( SCAN_NORMAL ); } break; - case 62: + case 35: +/* Line 1813 of yacc.c */ +#line 232 "jamgram.y" + { (yyval).parse = psete( (yyvsp[(3) - (9)]).string,(yyvsp[(4) - (9)]).parse,(yyvsp[(7) - (9)]).string,(yyvsp[(2) - (9)]).number ); } + break; -/* Line 1464 of yacc.c */ -#line 320 "jamgram.y" - { (yyval).parse = (yyvsp[(3) - (4)]).parse; } + case 36: +/* Line 1813 of yacc.c */ +#line 240 "jamgram.y" + { (yyval).number = ASSIGN_SET; } + break; + + case 37: +/* Line 1813 of yacc.c */ +#line 242 "jamgram.y" + { (yyval).number = ASSIGN_APPEND; } + break; + + case 38: +/* Line 1813 of yacc.c */ +#line 244 "jamgram.y" + { (yyval).number = ASSIGN_DEFAULT; } + break; + + case 39: +/* Line 1813 of yacc.c */ +#line 246 "jamgram.y" + { (yyval).number = ASSIGN_DEFAULT; } + break; + + case 40: +/* Line 1813 of yacc.c */ +#line 253 "jamgram.y" + { (yyval).parse = peval( EXPR_EXISTS, (yyvsp[(1) - (1)]).parse, pnull() ); } + break; + + case 41: +/* Line 1813 of yacc.c */ +#line 255 "jamgram.y" + { (yyval).parse = peval( EXPR_EQUALS, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 42: +/* Line 1813 of yacc.c */ +#line 257 "jamgram.y" + { (yyval).parse = peval( EXPR_NOTEQ, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 43: +/* Line 1813 of yacc.c */ +#line 259 "jamgram.y" + { (yyval).parse = peval( EXPR_LESS, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 44: +/* Line 1813 of yacc.c */ +#line 261 "jamgram.y" + { (yyval).parse = peval( EXPR_LESSEQ, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 45: +/* Line 1813 of yacc.c */ +#line 263 "jamgram.y" + { (yyval).parse = peval( EXPR_MORE, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 46: +/* Line 1813 of yacc.c */ +#line 265 "jamgram.y" + { (yyval).parse = peval( EXPR_MOREEQ, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 47: +/* Line 1813 of yacc.c */ +#line 267 "jamgram.y" + { (yyval).parse = peval( EXPR_AND, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 48: +/* Line 1813 of yacc.c */ +#line 269 "jamgram.y" + { (yyval).parse = peval( EXPR_AND, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 49: +/* Line 1813 of yacc.c */ +#line 271 "jamgram.y" + { (yyval).parse = peval( EXPR_OR, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 50: +/* Line 1813 of yacc.c */ +#line 273 "jamgram.y" + { (yyval).parse = peval( EXPR_OR, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } + break; + + case 51: +/* Line 1813 of yacc.c */ +#line 275 "jamgram.y" + { (yyval).parse = peval( EXPR_IN, (yyvsp[(1) - (3)]).parse, (yyvsp[(3) - (3)]).parse ); } 
+ break; + + case 52: +/* Line 1813 of yacc.c */ +#line 277 "jamgram.y" + { (yyval).parse = peval( EXPR_NOT, (yyvsp[(2) - (2)]).parse, pnull() ); } + break; + + case 53: +/* Line 1813 of yacc.c */ +#line 279 "jamgram.y" + { (yyval).parse = (yyvsp[(2) - (3)]).parse; } + break; + + case 54: +/* Line 1813 of yacc.c */ +#line 290 "jamgram.y" + { (yyval).parse = P0; } + break; + + case 55: +/* Line 1813 of yacc.c */ +#line 292 "jamgram.y" + { (yyval).parse = pnode( (yyvsp[(1) - (2)]).parse, (yyvsp[(2) - (2)]).parse ); } + break; + + case 56: +/* Line 1813 of yacc.c */ +#line 296 "jamgram.y" + { (yyval).parse = psnode( (yyvsp[(2) - (4)]).string, (yyvsp[(4) - (4)]).parse ); } + break; + + case 57: +/* Line 1813 of yacc.c */ +#line 305 "jamgram.y" + { (yyval).parse = pnode( P0, (yyvsp[(1) - (1)]).parse ); } + break; + + case 58: +/* Line 1813 of yacc.c */ +#line 307 "jamgram.y" + { (yyval).parse = pnode( (yyvsp[(3) - (3)]).parse, (yyvsp[(1) - (3)]).parse ); } + break; + + case 59: +/* Line 1813 of yacc.c */ +#line 317 "jamgram.y" + { (yyval).parse = (yyvsp[(1) - (1)]).parse; yymode( SCAN_NORMAL ); } + break; + + case 60: +/* Line 1813 of yacc.c */ +#line 321 "jamgram.y" + { (yyval).parse = pnull(); yymode( SCAN_PUNCT ); } + break; + + case 61: +/* Line 1813 of yacc.c */ +#line 323 "jamgram.y" + { (yyval).parse = pappend( (yyvsp[(1) - (2)]).parse, (yyvsp[(2) - (2)]).parse ); } + break; + + case 62: +/* Line 1813 of yacc.c */ +#line 327 "jamgram.y" + { (yyval).parse = plist( (yyvsp[(1) - (1)]).string ); } break; case 63: - -/* Line 1464 of yacc.c */ -#line 329 "jamgram.y" - { (yyval).parse = prule( (yyvsp[(1) - (2)]).string, (yyvsp[(2) - (2)]).parse ); } +/* Line 1813 of yacc.c */ +#line 328 "jamgram.y" + { yymode( SCAN_NORMAL ); } break; case 64: - -/* Line 1464 of yacc.c */ -#line 331 "jamgram.y" - { (yyval).parse = pon( (yyvsp[(2) - (4)]).parse, prule( (yyvsp[(3) - (4)]).string, (yyvsp[(4) - (4)]).parse ) ); } +/* Line 1813 of yacc.c */ +#line 329 "jamgram.y" + { (yyval).parse = (yyvsp[(3) - (4)]).parse; } break; case 65: - -/* Line 1464 of yacc.c */ -#line 333 "jamgram.y" - { (yyval).parse = pon( (yyvsp[(2) - (4)]).parse, (yyvsp[(4) - (4)]).parse ); } +/* Line 1813 of yacc.c */ +#line 338 "jamgram.y" + { (yyval).parse = prule( (yyvsp[(1) - (2)]).string, (yyvsp[(2) - (2)]).parse ); } break; case 66: - -/* Line 1464 of yacc.c */ -#line 343 "jamgram.y" - { (yyval).number = 0; } +/* Line 1813 of yacc.c */ +#line 340 "jamgram.y" + { (yyval).parse = pon( (yyvsp[(2) - (4)]).parse, prule( (yyvsp[(3) - (4)]).string, (yyvsp[(4) - (4)]).parse ) ); } break; case 67: - -/* Line 1464 of yacc.c */ -#line 345 "jamgram.y" - { (yyval).number = (yyvsp[(1) - (2)]).number | (yyvsp[(2) - (2)]).number; } +/* Line 1813 of yacc.c */ +#line 342 "jamgram.y" + { (yyval).parse = pon( (yyvsp[(2) - (4)]).parse, (yyvsp[(4) - (4)]).parse ); } break; case 68: - -/* Line 1464 of yacc.c */ -#line 349 "jamgram.y" - { (yyval).number = EXEC_UPDATED; } +/* Line 1813 of yacc.c */ +#line 352 "jamgram.y" + { (yyval).number = 0; } break; case 69: - -/* Line 1464 of yacc.c */ -#line 351 "jamgram.y" - { (yyval).number = EXEC_TOGETHER; } +/* Line 1813 of yacc.c */ +#line 354 "jamgram.y" + { (yyval).number = (yyvsp[(1) - (2)]).number | (yyvsp[(2) - (2)]).number; } break; case 70: - -/* Line 1464 of yacc.c */ -#line 353 "jamgram.y" - { (yyval).number = EXEC_IGNORE; } +/* Line 1813 of yacc.c */ +#line 358 "jamgram.y" + { (yyval).number = EXEC_UPDATED; } break; case 71: - -/* Line 1464 of yacc.c */ -#line 355 "jamgram.y" - { (yyval).number = 
EXEC_QUIETLY; } +/* Line 1813 of yacc.c */ +#line 360 "jamgram.y" + { (yyval).number = EXEC_TOGETHER; } break; case 72: - -/* Line 1464 of yacc.c */ -#line 357 "jamgram.y" - { (yyval).number = EXEC_PIECEMEAL; } +/* Line 1813 of yacc.c */ +#line 362 "jamgram.y" + { (yyval).number = EXEC_IGNORE; } break; case 73: - -/* Line 1464 of yacc.c */ -#line 359 "jamgram.y" - { (yyval).number = EXEC_EXISTING; } +/* Line 1813 of yacc.c */ +#line 364 "jamgram.y" + { (yyval).number = EXEC_QUIETLY; } break; case 74: - -/* Line 1464 of yacc.c */ -#line 368 "jamgram.y" - { (yyval).parse = pnull(); } +/* Line 1813 of yacc.c */ +#line 366 "jamgram.y" + { (yyval).number = EXEC_PIECEMEAL; } break; case 75: +/* Line 1813 of yacc.c */ +#line 368 "jamgram.y" + { (yyval).number = EXEC_EXISTING; } + break; -/* Line 1464 of yacc.c */ -#line 370 "jamgram.y" + case 76: +/* Line 1813 of yacc.c */ +#line 377 "jamgram.y" + { (yyval).parse = pnull(); } + break; + + case 77: +/* Line 1813 of yacc.c */ +#line 379 "jamgram.y" { (yyval).parse = (yyvsp[(2) - (2)]).parse; } break; - -/* Line 1464 of yacc.c */ -#line 2118 "y.tab.c" +/* Line 1813 of yacc.c */ +#line 2111 "y.tab.c" default: break; } + /* User semantic actions sometimes alter yychar, and that requires + that yytoken be updated with the new translation. We take the + approach of translating immediately before every use of yytoken. + One alternative is translating here after every semantic action, + but that translation would be missed if the semantic action invokes + YYABORT, YYACCEPT, or YYERROR immediately after altering yychar or + if it invokes YYBACKUP. In the case of YYABORT or YYACCEPT, an + incorrect destructor might then be invoked immediately. In the + case of YYERROR or YYBACKUP, subsequent parser actions might lead + to an incorrect destructor call or verbose syntax error message + before the lookahead is translated. */ YY_SYMBOL_PRINT ("-> $$ =", yyr1[yyn], &yyval, &yyloc); YYPOPSTACK (yylen); @@ -2144,6 +2148,10 @@ yyreduce: | yyerrlab -- here on detecting error | `------------------------------------*/ yyerrlab: + /* Make sure we have latest lookahead translation. See comments at + user semantic actions for why this is necessary. */ + yytoken = yychar == YYEMPTY ? YYEMPTY : YYTRANSLATE (yychar); + /* If not already recovering from an error, report this error. */ if (!yyerrstatus) { @@ -2151,37 +2159,36 @@ yyerrlab: #if ! YYERROR_VERBOSE yyerror (YY_("syntax error")); #else +# define YYSYNTAX_ERROR yysyntax_error (&yymsg_alloc, &yymsg, \ + yyssp, yytoken) { - YYSIZE_T yysize = yysyntax_error (0, yystate, yychar); - if (yymsg_alloc < yysize && yymsg_alloc < YYSTACK_ALLOC_MAXIMUM) - { - YYSIZE_T yyalloc = 2 * yysize; - if (! 
(yysize <= yyalloc && yyalloc <= YYSTACK_ALLOC_MAXIMUM)) - yyalloc = YYSTACK_ALLOC_MAXIMUM; - if (yymsg != yymsgbuf) - YYSTACK_FREE (yymsg); - yymsg = (char *) YYSTACK_ALLOC (yyalloc); - if (yymsg) - yymsg_alloc = yyalloc; - else - { - yymsg = yymsgbuf; - yymsg_alloc = sizeof yymsgbuf; - } - } - - if (0 < yysize && yysize <= yymsg_alloc) - { - (void) yysyntax_error (yymsg, yystate, yychar); - yyerror (yymsg); - } - else - { - yyerror (YY_("syntax error")); - if (yysize != 0) - goto yyexhaustedlab; - } + char const *yymsgp = YY_("syntax error"); + int yysyntax_error_status; + yysyntax_error_status = YYSYNTAX_ERROR; + if (yysyntax_error_status == 0) + yymsgp = yymsg; + else if (yysyntax_error_status == 1) + { + if (yymsg != yymsgbuf) + YYSTACK_FREE (yymsg); + yymsg = (char *) YYSTACK_ALLOC (yymsg_alloc); + if (!yymsg) + { + yymsg = yymsgbuf; + yymsg_alloc = sizeof yymsgbuf; + yysyntax_error_status = 2; + } + else + { + yysyntax_error_status = YYSYNTAX_ERROR; + yymsgp = yymsg; + } + } + yyerror (yymsgp); + if (yysyntax_error_status == 2) + goto yyexhaustedlab; } +# undef YYSYNTAX_ERROR #endif } @@ -2240,7 +2247,7 @@ yyerrlab1: for (;;) { yyn = yypact[yystate]; - if (yyn != YYPACT_NINF) + if (!yypact_value_is_default (yyn)) { yyn += YYTERROR; if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) @@ -2263,7 +2270,9 @@ yyerrlab1: YY_STACK_PRINT (yyss, yyssp); } + YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN *++yyvsp = yylval; + YY_IGNORE_MAYBE_UNINITIALIZED_END /* Shift the error token. */ @@ -2287,7 +2296,7 @@ yyabortlab: yyresult = 1; goto yyreturn; -#if !defined(yyoverflow) || YYERROR_VERBOSE +#if !defined yyoverflow || YYERROR_VERBOSE /*-------------------------------------------------. | yyexhaustedlab -- memory exhaustion comes here. | `-------------------------------------------------*/ @@ -2299,8 +2308,13 @@ yyexhaustedlab: yyreturn: if (yychar != YYEMPTY) - yydestruct ("Cleanup: discarding lookahead", - yytoken, &yylval); + { + /* Make sure we have latest lookahead translation. See comments at + user semantic actions for why this is necessary. */ + yytoken = YYTRANSLATE (yychar); + yydestruct ("Cleanup: discarding lookahead", + yytoken, &yylval); + } /* Do not reclaim the symbols of the rule which action triggered this YYABORT or YYACCEPT. */ YYPOPSTACK (yylen); @@ -2324,4 +2338,3 @@ yyreturn: } - diff --git a/src/engine/jamgram.h b/src/engine/jamgram.h index 97f117535..23111972c 100644 --- a/src/engine/jamgram.h +++ b/src/engine/jamgram.h @@ -1,9 +1,8 @@ -/* A Bison parser, made by GNU Bison 2.4.3. */ +/* A Bison parser, made by GNU Bison 2.6.4. */ -/* Skeleton interface for Bison's Yacc-like parsers in C +/* Bison interface for Yacc-like parsers in C - Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006, - 2009, 2010 Free Software Foundation, Inc. + Copyright (C) 1984, 1989-1990, 2000-2012 Free Software Foundation, Inc. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by @@ -31,6 +30,15 @@ This special exception was added by the Free Software Foundation in version 2.2 of Bison. */ +#ifndef YY_YY_Y_TAB_H_INCLUDED +# define YY_YY_Y_TAB_H_INCLUDED +/* Enabling traces. */ +#ifndef YYDEBUG +# define YYDEBUG 1 +#endif +#if YYDEBUG +extern int yydebug; +#endif /* Tokens. 
*/ #ifndef YYTOKENTYPE @@ -57,33 +65,35 @@ _RBRACKET_t = 274, ACTIONS_t = 275, BIND_t = 276, - CASE_t = 277, - CLASS_t = 278, - DEFAULT_t = 279, - ELSE_t = 280, - EXISTING_t = 281, - FOR_t = 282, - IF_t = 283, - IGNORE_t = 284, - IN_t = 285, - INCLUDE_t = 286, - LOCAL_t = 287, - MODULE_t = 288, - ON_t = 289, - PIECEMEAL_t = 290, - QUIETLY_t = 291, - RETURN_t = 292, - RULE_t = 293, - SWITCH_t = 294, - TOGETHER_t = 295, - UPDATED_t = 296, - WHILE_t = 297, - _LBRACE_t = 298, - _BAR_t = 299, - _BARBAR_t = 300, - _RBRACE_t = 301, - ARG = 302, - STRING = 303 + BREAK_t = 277, + CASE_t = 278, + CLASS_t = 279, + CONTINUE_t = 280, + DEFAULT_t = 281, + ELSE_t = 282, + EXISTING_t = 283, + FOR_t = 284, + IF_t = 285, + IGNORE_t = 286, + IN_t = 287, + INCLUDE_t = 288, + LOCAL_t = 289, + MODULE_t = 290, + ON_t = 291, + PIECEMEAL_t = 292, + QUIETLY_t = 293, + RETURN_t = 294, + RULE_t = 295, + SWITCH_t = 296, + TOGETHER_t = 297, + UPDATED_t = 298, + WHILE_t = 299, + _LBRACE_t = 300, + _BAR_t = 301, + _BARBAR_t = 302, + _RBRACE_t = 303, + ARG = 304, + STRING = 305 }; #endif /* Tokens. */ @@ -106,34 +116,35 @@ #define _RBRACKET_t 274 #define ACTIONS_t 275 #define BIND_t 276 -#define CASE_t 277 -#define CLASS_t 278 -#define DEFAULT_t 279 -#define ELSE_t 280 -#define EXISTING_t 281 -#define FOR_t 282 -#define IF_t 283 -#define IGNORE_t 284 -#define IN_t 285 -#define INCLUDE_t 286 -#define LOCAL_t 287 -#define MODULE_t 288 -#define ON_t 289 -#define PIECEMEAL_t 290 -#define QUIETLY_t 291 -#define RETURN_t 292 -#define RULE_t 293 -#define SWITCH_t 294 -#define TOGETHER_t 295 -#define UPDATED_t 296 -#define WHILE_t 297 -#define _LBRACE_t 298 -#define _BAR_t 299 -#define _BARBAR_t 300 -#define _RBRACE_t 301 -#define ARG 302 -#define STRING 303 - +#define BREAK_t 277 +#define CASE_t 278 +#define CLASS_t 279 +#define CONTINUE_t 280 +#define DEFAULT_t 281 +#define ELSE_t 282 +#define EXISTING_t 283 +#define FOR_t 284 +#define IF_t 285 +#define IGNORE_t 286 +#define IN_t 287 +#define INCLUDE_t 288 +#define LOCAL_t 289 +#define MODULE_t 290 +#define ON_t 291 +#define PIECEMEAL_t 292 +#define QUIETLY_t 293 +#define RETURN_t 294 +#define RULE_t 295 +#define SWITCH_t 296 +#define TOGETHER_t 297 +#define UPDATED_t 298 +#define WHILE_t 299 +#define _LBRACE_t 300 +#define _BAR_t 301 +#define _BARBAR_t 302 +#define _RBRACE_t 303 +#define ARG 304 +#define STRING 305 @@ -146,4 +157,18 @@ typedef int YYSTYPE; extern YYSTYPE yylval; +#ifdef YYPARSE_PARAM +#if defined __STDC__ || defined __cplusplus +int yyparse (void *YYPARSE_PARAM); +#else +int yyparse (); +#endif +#else /* ! YYPARSE_PARAM */ +#if defined __STDC__ || defined __cplusplus +int yyparse (void); +#else +int yyparse (); +#endif +#endif /* ! 
YYPARSE_PARAM */ +#endif /* !YY_YY_Y_TAB_H_INCLUDED */ diff --git a/src/engine/jamgram.y b/src/engine/jamgram.y index 543f1561a..2e980b8f7 100644 --- a/src/engine/jamgram.y +++ b/src/engine/jamgram.y @@ -17,8 +17,10 @@ %token _RBRACKET_t %token ACTIONS_t %token BIND_t +%token BREAK_t %token CASE_t %token CLASS_t +%token CONTINUE_t %token DEFAULT_t %token ELSE_t %token EXISTING_t @@ -128,6 +130,9 @@ # define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f ) # define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 ) # define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 ) +# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 ) +# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 ) +# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 ) # define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 ) # define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 ) @@ -196,7 +201,11 @@ rule : _LBRACE_t block _RBRACE_t | arg ON_t list assign list _SEMIC_t { $$.parse = pset1( $1.parse, $3.parse, $5.parse, $4.number ); } | RETURN_t list _SEMIC_t - { $$.parse = $2.parse; } + { $$.parse = preturn( $2.parse ); } + | BREAK_t _SEMIC_t + { $$.parse = pbreak(); } + | CONTINUE_t _SEMIC_t + { $$.parse = pcontinue(); } | FOR_t local_opt ARG IN_t list _LBRACE_t block _RBRACE_t { $$.parse = pfor( $3.string, $5.parse, $7.parse, $2.number ); } | SWITCH_t list _LBRACE_t cases _RBRACE_t diff --git a/src/engine/jamgram.yy b/src/engine/jamgram.yy index 8d20e3896..a11556b52 100644 --- a/src/engine/jamgram.yy +++ b/src/engine/jamgram.yy @@ -84,6 +84,9 @@ # define psete( s,l,s1,f ) parse_make( PARSE_SETEXEC,l,P0,P0,s,s1,f ) # define pswitch( l,r ) parse_make( PARSE_SWITCH,l,r,P0,S0,S0,0 ) # define pwhile( l,r ) parse_make( PARSE_WHILE,l,r,P0,S0,S0,0 ) +# define preturn( l ) parse_make( PARSE_RETURN,l,P0,P0,S0,S0,0 ) +# define pbreak() parse_make( PARSE_BREAK,P0,P0,P0,S0,S0,0 ) +# define pcontinue() parse_make( PARSE_CONTINUE,P0,P0,P0,S0,S0,0 ) # define pnode( l,r ) parse_make( F0,l,r,P0,S0,S0,0 ) # define psnode( s,l ) parse_make( F0,l,P0,P0,s,S0,0 ) @@ -152,7 +155,11 @@ rule : `{` block `}` | arg `on` list assign list `;` { $$.parse = pset1( $1.parse, $3.parse, $5.parse, $4.number ); } | `return` list `;` - { $$.parse = $2.parse; } + { $$.parse = preturn( $2.parse ); } + | `break` `;` + { $$.parse = pbreak(); } + | `continue` `;` + { $$.parse = pcontinue(); } | `for` local_opt ARG `in` list `{` block `}` { $$.parse = pfor( $3.string, $5.parse, $7.parse, $2.number ); } | `switch` list `{` cases `}` diff --git a/src/engine/jamgramtab.h b/src/engine/jamgramtab.h index a0fd43f6a..38a810871 100644 --- a/src/engine/jamgramtab.h +++ b/src/engine/jamgramtab.h @@ -17,8 +17,10 @@ { "]", _RBRACKET_t }, { "actions", ACTIONS_t }, { "bind", BIND_t }, + { "break", BREAK_t }, { "case", CASE_t }, { "class", CLASS_t }, + { "continue", CONTINUE_t }, { "default", DEFAULT_t }, { "else", ELSE_t }, { "existing", EXISTING_t }, diff --git a/src/engine/lists.c b/src/engine/lists.c index 3f2309b05..065145e00 100644 --- a/src/engine/lists.c +++ b/src/engine/lists.c @@ -10,6 +10,7 @@ #include "jam.h" #include "lists.h" +#include "output.h" #include @@ -114,7 +115,7 @@ LIST * list_push_back( LIST * head, OBJECT * value ) unsigned int i; if ( DEBUG_LISTS ) - printf( "list > %s <\n", object_str( value ) ); + out_printf( "list > %s <\n", object_str( value ) ); /* If the size is a power of 2, reallocate. 
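
The jamgram.y, jamgram.yy and jamgramtab.h hunks above add "break" and "continue" as statements of the Jam language and change "return" to build a dedicated PARSE_RETURN node (through the new preturn, pbreak and pcontinue constructors) rather than simply passing its list through. As an illustrative sketch only (not part of the patch), and assuming the usual loop-control semantics for the new parse nodes, the statements would read like this in a Jamfile:

    for local x in a b c d
    {
        if $(x) = a { continue ; }   # skip to the next element
        if $(x) = c { break ; }      # leave the loop early
        ECHO $(x) ;
    }
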
*/ if ( size == 0 ) @@ -318,10 +319,10 @@ void list_print( LIST * l ) LISTITER iter = list_begin( l ), end = list_end( l ); if ( iter != end ) { - printf( "%s", object_str( list_item( iter ) ) ); + out_printf( "%s", object_str( list_item( iter ) ) ); iter = list_next( iter ); for ( ; iter != end; iter = list_next( iter ) ) - printf( " %s", object_str( list_item( iter ) ) ); + out_printf( " %s", object_str( list_item( iter ) ) ); } } @@ -434,7 +435,7 @@ void lol_print( LOL * lol ) for ( i = 0; i < lol->count; ++i ) { if ( i ) - printf( " : " ); + out_printf( " : " ); list_print( lol->list[ i ] ); } } diff --git a/src/engine/make.c b/src/engine/make.c index c83f525c8..56a674503 100644 --- a/src/engine/make.c +++ b/src/engine/make.c @@ -129,19 +129,19 @@ int make( LIST * targets, int anyhow ) if ( DEBUG_MAKE ) { if ( counts->targets ) - printf( "...found %d target%s...\n", counts->targets, + out_printf( "...found %d target%s...\n", counts->targets, counts->targets > 1 ? "s" : "" ); if ( counts->temp ) - printf( "...using %d temp target%s...\n", counts->temp, + out_printf( "...using %d temp target%s...\n", counts->temp, counts->temp > 1 ? "s" : "" ); if ( counts->updating ) - printf( "...updating %d target%s...\n", counts->updating, + out_printf( "...updating %d target%s...\n", counts->updating, counts->updating > 1 ? "s" : "" ); if ( counts->cantfind ) - printf( "...can't find %d target%s...\n", counts->cantfind, + out_printf( "...can't find %d target%s...\n", counts->cantfind, counts->cantfind > 1 ? "s" : "" ); if ( counts->cantmake ) - printf( "...can't make %d target%s...\n", counts->cantmake, + out_printf( "...can't make %d target%s...\n", counts->cantmake, counts->cantmake > 1 ? "s" : "" ); } @@ -181,7 +181,7 @@ static void update_dependants( TARGET * t ) if ( DEBUG_FATE ) { - printf( "fate change %s from %s to %s (as dependant of %s)\n", + out_printf( "fate change %s from %s to %s (as dependant of %s)\n", object_str( p->name ), target_fate[ (int) fate0 ], target_fate[ (int) p->fate ], object_str( t->name ) ); } @@ -212,7 +212,7 @@ static void force_rebuilds( TARGET * t ) if ( r->fate < T_FATE_BUILD ) { if ( DEBUG_FATE ) - printf( "fate change %s from %s to %s (by rebuild)\n", + out_printf( "fate change %s from %s to %s (by rebuild)\n", object_str( r->name ), target_fate[ (int) r->fate ], target_fate[ T_FATE_REBUILD ] ); /* Force rebuild it. */ @@ -297,14 +297,14 @@ void make0 #endif if ( DEBUG_MAKEPROG ) - printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) ); + out_printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) ); /* * Step 1: Initialize. 
*/ if ( DEBUG_MAKEPROG ) - printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) ); + out_printf( "make\t--\t%s%s\n", spaces( depth ), object_str( t->name ) ); t->fate = T_FATE_MAKING; t->depth = depth; @@ -377,7 +377,7 @@ void make0 if ( DEBUG_BIND ) { if ( !object_equal( t->name, t->boundname ) ) - printf( "bind\t--\t%s%s: %s\n", spaces( depth ), + out_printf( "bind\t--\t%s%s: %s\n", spaces( depth ), object_str( t->name ), object_str( t->boundname ) ); switch ( t->binding ) @@ -385,12 +385,12 @@ void make0 case T_BIND_UNBOUND: case T_BIND_MISSING: case T_BIND_PARENTS: - printf( "time\t--\t%s%s: %s\n", spaces( depth ), + out_printf( "time\t--\t%s%s: %s\n", spaces( depth ), object_str( t->name ), target_bind[ (int)t->binding ] ); break; case T_BIND_EXISTS: - printf( "time\t--\t%s%s: %s\n", spaces( depth ), + out_printf( "time\t--\t%s%s: %s\n", spaces( depth ), object_str( t->name ), timestamp_str( &t->time ) ); break; } @@ -411,7 +411,7 @@ void make0 if ( c->target->fate == T_FATE_INIT ) make0( c->target, ptime, depth + 1, counts, anyhow, rescanning ); else if ( c->target->fate == T_FATE_MAKING && !internal ) - printf( "warning: %s depends on itself\n", object_str( + out_printf( "warning: %s depends on itself\n", object_str( c->target->name ) ); else if ( c->target->fate != T_FATE_MAKING && rescanning ) make0rescan( c->target, rescanning ); @@ -505,7 +505,7 @@ void make0 #ifdef OPT_GRAPH_DEBUG_EXT if ( DEBUG_FATE ) if ( fate < c->target->fate ) - printf( "fate change %s from %s to %s by dependency %s\n", + out_printf( "fate change %s from %s to %s by dependency %s\n", object_str( t->name ), target_fate[ (int)fate ], target_fate[ (int)c->target->fate ], object_str( c->target->name ) ); @@ -534,7 +534,7 @@ void make0 #ifdef OPT_GRAPH_DEBUG_EXT if ( DEBUG_FATE ) if ( fate != T_FATE_STABLE ) - printf( "fate change %s back to stable, NOUPDATE.\n", + out_printf( "fate change %s back to stable, NOUPDATE.\n", object_str( t->name ) ); #endif @@ -632,10 +632,10 @@ void make0 if ( DEBUG_FATE && ( fate != savedFate ) ) { if ( savedFate == T_FATE_STABLE ) - printf( "fate change %s set to %s%s\n", object_str( t->name ), + out_printf( "fate change %s set to %s%s\n", object_str( t->name ), target_fate[ fate ], oldTimeStamp ? " (by timestamp)" : "" ); else - printf( "fate change %s from %s to %s%s\n", object_str( t->name ), + out_printf( "fate change %s from %s to %s%s\n", object_str( t->name ), target_fate[ savedFate ], target_fate[ fate ], oldTimeStamp ? 
" (by timestamp)" : "" ); } @@ -653,7 +653,7 @@ void make0 { #ifdef OPT_GRAPH_DEBUG_EXT if ( DEBUG_FATE ) - printf( "fate change %s to STABLE from %s, " + out_printf( "fate change %s to STABLE from %s, " "no actions, no dependencies and do not care\n", object_str( t->name ), target_fate[ fate ] ); #endif @@ -661,7 +661,7 @@ void make0 } else { - printf( "don't know how to make %s\n", object_str( t->name ) ); + out_printf( "don't know how to make %s\n", object_str( t->name ) ); fate = T_FATE_CANTFIND; } } @@ -731,8 +731,8 @@ void make0 #else if ( !( ++counts->targets % 1000 ) && DEBUG_MAKE ) { - printf( "...patience...\n" ); - fflush(stdout); + out_printf( "...patience...\n" ); + out_flush(); } #endif @@ -754,7 +754,7 @@ void make0 flag = "*"; if ( DEBUG_MAKEPROG ) - printf( "made%s\t%s\t%s%s\n", flag, target_fate[ (int)t->fate ], + out_printf( "made%s\t%s\t%s%s\n", flag, target_fate[ (int)t->fate ], spaces( depth ), object_str( t->name ) ); } @@ -792,76 +792,76 @@ static void dependGraphOutput( TARGET * t, int depth ) case T_FATE_MISSING: case T_FATE_OUTDATED: case T_FATE_UPDATE: - printf( "->%s%2d Name: %s\n", spaces( depth ), depth, target_name( t + out_printf( "->%s%2d Name: %s\n", spaces( depth ), depth, target_name( t ) ); break; default: - printf( " %s%2d Name: %s\n", spaces( depth ), depth, target_name( t + out_printf( " %s%2d Name: %s\n", spaces( depth ), depth, target_name( t ) ); break; } if ( !object_equal( t->name, t->boundname ) ) - printf( " %s Loc: %s\n", spaces( depth ), object_str( t->boundname ) + out_printf( " %s Loc: %s\n", spaces( depth ), object_str( t->boundname ) ); switch ( t->fate ) { case T_FATE_STABLE: - printf( " %s : Stable\n", spaces( depth ) ); + out_printf( " %s : Stable\n", spaces( depth ) ); break; case T_FATE_NEWER: - printf( " %s : Newer\n", spaces( depth ) ); + out_printf( " %s : Newer\n", spaces( depth ) ); break; case T_FATE_ISTMP: - printf( " %s : Up to date temp file\n", spaces( depth ) ); + out_printf( " %s : Up to date temp file\n", spaces( depth ) ); break; case T_FATE_NEEDTMP: - printf( " %s : Temporary file, to be updated\n", spaces( depth ) + out_printf( " %s : Temporary file, to be updated\n", spaces( depth ) ); break; case T_FATE_TOUCHED: - printf( " %s : Been touched, updating it\n", spaces( depth ) ); + out_printf( " %s : Been touched, updating it\n", spaces( depth ) ); break; case T_FATE_MISSING: - printf( " %s : Missing, creating it\n", spaces( depth ) ); + out_printf( " %s : Missing, creating it\n", spaces( depth ) ); break; case T_FATE_OUTDATED: - printf( " %s : Outdated, updating it\n", spaces( depth ) ); + out_printf( " %s : Outdated, updating it\n", spaces( depth ) ); break; case T_FATE_REBUILD: - printf( " %s : Rebuild, updating it\n", spaces( depth ) ); + out_printf( " %s : Rebuild, updating it\n", spaces( depth ) ); break; case T_FATE_UPDATE: - printf( " %s : Updating it\n", spaces( depth ) ); + out_printf( " %s : Updating it\n", spaces( depth ) ); break; case T_FATE_CANTFIND: - printf( " %s : Can not find it\n", spaces( depth ) ); + out_printf( " %s : Can not find it\n", spaces( depth ) ); break; case T_FATE_CANTMAKE: - printf( " %s : Can make it\n", spaces( depth ) ); + out_printf( " %s : Can make it\n", spaces( depth ) ); break; } if ( t->flags & ~T_FLAG_VISITED ) { - printf( " %s : ", spaces( depth ) ); - if ( t->flags & T_FLAG_TEMP ) printf( "TEMPORARY " ); - if ( t->flags & T_FLAG_NOCARE ) printf( "NOCARE " ); - if ( t->flags & T_FLAG_NOTFILE ) printf( "NOTFILE " ); - if ( t->flags & T_FLAG_TOUCHED ) printf( "TOUCHED " ); 
- if ( t->flags & T_FLAG_LEAVES ) printf( "LEAVES " ); - if ( t->flags & T_FLAG_NOUPDATE ) printf( "NOUPDATE " ); - printf( "\n" ); + out_printf( " %s : ", spaces( depth ) ); + if ( t->flags & T_FLAG_TEMP ) out_printf( "TEMPORARY " ); + if ( t->flags & T_FLAG_NOCARE ) out_printf( "NOCARE " ); + if ( t->flags & T_FLAG_NOTFILE ) out_printf( "NOTFILE " ); + if ( t->flags & T_FLAG_TOUCHED ) out_printf( "TOUCHED " ); + if ( t->flags & T_FLAG_LEAVES ) out_printf( "LEAVES " ); + if ( t->flags & T_FLAG_NOUPDATE ) out_printf( "NOUPDATE " ); + out_printf( "\n" ); } for ( c = t->depends; c; c = c->next ) { - printf( " %s : Depends on %s (%s)", spaces( depth ), + out_printf( " %s : Depends on %s (%s)", spaces( depth ), target_name( c->target ), target_fate[ (int)c->target->fate ] ); if ( !timestamp_cmp( &c->target->time, &t->time ) ) - printf( " (max time)"); - printf( "\n" ); + out_printf( " (max time)"); + out_printf( "\n" ); } for ( c = t->depends; c; c = c->next ) diff --git a/src/engine/make1.c b/src/engine/make1.c index 5a96dc4e5..7dbf7c8da 100644 --- a/src/engine/make1.c +++ b/src/engine/make1.c @@ -49,6 +49,7 @@ #include "rules.h" #include "search.h" #include "variable.h" +#include "output.h" #include #include @@ -245,19 +246,19 @@ int make1( LIST * targets ) /* Talk about it. */ if ( counts->failed ) - printf( "...failed updating %d target%s...\n", counts->failed, + out_printf( "...failed updating %d target%s...\n", counts->failed, counts->failed > 1 ? "s" : "" ); if ( DEBUG_MAKE && counts->skipped ) - printf( "...skipped %d target%s...\n", counts->skipped, + out_printf( "...skipped %d target%s...\n", counts->skipped, counts->skipped > 1 ? "s" : "" ); if ( DEBUG_MAKE && counts->made ) - printf( "...updated %d target%s...\n", counts->made, + out_printf( "...updated %d target%s...\n", counts->made, counts->made > 1 ? "s" : "" ); /* If we were interrupted, exit now that all child processes have finished. */ if ( intr ) - exit( 1 ); + exit( EXITBAD ); { LISTITER iter, end; @@ -425,11 +426,11 @@ static void make1b( state * const pState ) if ( ( t->flags & ( T_FLAG_RMOLD | T_FLAG_NOTFILE ) ) == T_FLAG_RMOLD ) { if ( !unlink( object_str( t->boundname ) ) ) - printf( "...removing outdated %s\n", object_str( t->boundname ) + out_printf( "...removing outdated %s\n", object_str( t->boundname ) ); } else - printf( "...skipped %s for lack of %s...\n", object_str( t->name ), + out_printf( "...skipped %s for lack of %s...\n", object_str( t->name ), failed_name ); } @@ -447,7 +448,7 @@ static void make1b( state * const pState ) case T_FATE_ISTMP: if ( DEBUG_MAKE ) - printf( "...using %s...\n", object_str( t->name ) ); + out_printf( "...using %s...\n", object_str( t->name ) ); break; case T_FATE_TOUCHED: @@ -464,7 +465,7 @@ static void make1b( state * const pState ) { ++counts->total; if ( DEBUG_MAKE && !( counts->total % 100 ) ) - printf( "...on %dth target...\n", counts->total ); + out_printf( "...on %dth target...\n", counts->total ); t->cmds = (char *)make1cmds( t ); /* Update the target's "progress" so MAKE1C processing counts it @@ -476,7 +477,7 @@ static void make1b( state * const pState ) /* All valid fates should have been accounted for by now. 
*/ default: - printf( "ERROR: %s has bad fate %d", object_str( t->name ), + err_printf( "ERROR: %s has bad fate %d", object_str( t->name ), t->fate ); abort(); } @@ -492,7 +493,7 @@ static void make1b( state * const pState ) else if ( DEBUG_EXECCMD ) { CMD * cmd = ( CMD * )t->cmds; - printf( "Delaying %s %s: %d targets not ready\n", object_str( cmd->rule->name ), object_str( t->boundname ), cmd->asynccnt ); + out_printf( "Delaying %s %s: %d targets not ready\n", object_str( cmd->rule->name ), object_str( t->boundname ), cmd->asynccnt ); } } @@ -593,8 +594,6 @@ static void make1c( state const * const pState ) TARGET * saved_includes; SETTINGS * s; - t->rescanned = 1; - /* Clean current includes. */ saved_includes = t->includes; t->includes = 0; @@ -855,7 +854,7 @@ static void make1c_closure { call_timing_rule( t, time ); if ( DEBUG_EXECCMD ) - printf( "%f sec system; %f sec user\n", time->system, time->user ); + out_printf( "%f sec system; %f sec user\n", time->system, time->user ); /* Assume -p0 is in effect, i.e. cmd_stdout contains merged output. */ call_action_rule( t, status_orig, time, cmd->buf->value, cmd_stdout ); @@ -865,11 +864,11 @@ static void make1c_closure if ( t->status == EXEC_CMD_FAIL && DEBUG_MAKE ) { if ( !DEBUG_EXEC ) - printf( "%s\n", cmd->buf->value ); + out_printf( "%s\n", cmd->buf->value ); - printf( "...failed %s ", object_str( cmd->rule->name ) ); + out_printf( "...failed %s ", object_str( cmd->rule->name ) ); list_print( lol_get( (LOL *)&cmd->args, 0 ) ); - printf( "...\n" ); + out_printf( "...\n" ); } /* On interrupt, set quit so _everything_ fails. Do the same for failed @@ -896,7 +895,7 @@ static void make1c_closure char const * const filename = object_str( list_item( iter ) ); TARGET const * const t = bindtarget( list_item( iter ) ); if ( !( t->flags & T_FLAG_PRECIOUS ) && !unlink( filename ) ) - printf( "...removing %s\n", filename ); + out_printf( "...removing %s\n", filename ); } } @@ -938,7 +937,7 @@ static void push_cmds( CMDLIST * cmds, int status ) else if ( DEBUG_EXECCMD ) { TARGET * first_target = bindtarget( list_front( lol_get( &next_cmd->args, 0 ) ) ); - printf( "Delaying %s %s: %d targets not ready\n", object_str( next_cmd->rule->name ), object_str( first_target->boundname ), next_cmd->asynccnt ); + out_printf( "Delaying %s %s: %d targets not ready\n", object_str( next_cmd->rule->name ), object_str( first_target->boundname ), next_cmd->asynccnt ); } } else @@ -1153,12 +1152,12 @@ static CMD * make1cmds( TARGET * t ) : "contains a line that is too long"; assert( cmd_check_result == EXEC_CHECK_TOO_LONG || cmd_check_result == EXEC_CHECK_LINE_TOO_LONG ); - printf( "%s action %s (%d, max %d):\n", object_str( + out_printf( "%s action %s (%d, max %d):\n", object_str( rule->name ), error_message, cmd_error_length, cmd_error_max_length ); /* Tell the user what did not fit. 
*/ - fputs( cmd->buf->value, stdout ); + out_puts( cmd->buf->value ); exit( EXITBAD ); } @@ -1399,7 +1398,7 @@ static int cmd_sem_lock( TARGET * t ) if ( iter->target->asynccnt > 0 ) { if ( DEBUG_EXECCMD ) - printf( "SEM: %s is busy, delaying launch of %s\n", + out_printf( "SEM: %s is busy, delaying launch of %s\n", object_str( iter->target->name ), object_str( t->name ) ); iter->target->parents = targetentry( iter->target->parents, t ); return 0; @@ -1410,7 +1409,7 @@ static int cmd_sem_lock( TARGET * t ) { ++iter->target->asynccnt; if ( DEBUG_EXECCMD ) - printf( "SEM: %s now used by %s\n", object_str( iter->target->name + out_printf( "SEM: %s now used by %s\n", object_str( iter->target->name ), object_str( t->name ) ); } /* A cmd only needs to be locked around its execution. @@ -1429,7 +1428,7 @@ static void cmd_sem_unlock( TARGET * t ) for ( iter = cmd->unlock; iter; iter = iter->next ) { if ( DEBUG_EXECCMD ) - printf( "SEM: %s is now free\n", object_str( + out_printf( "SEM: %s is now free\n", object_str( iter->target->name ) ); --iter->target->asynccnt; assert( iter->target->asynccnt <= 0 ); diff --git a/src/engine/object.c b/src/engine/object.c index ef46e4ae4..02440d2d1 100644 --- a/src/engine/object.c +++ b/src/engine/object.c @@ -23,6 +23,7 @@ #include "jam.h" #include "object.h" +#include "output.h" #include #include @@ -386,9 +387,9 @@ void object_done() if ( DEBUG_MEM ) { - printf( "%dK in strings\n", strtotal / 1024 ); + out_printf( "%dK in strings\n", strtotal / 1024 ); if ( strcount_in != strcount_out ) - printf( "--- %d strings of %d dangling\n", strcount_in - + out_printf( "--- %d strings of %d dangling\n", strcount_in - strcount_out, strcount_in ); } } diff --git a/src/engine/output.c b/src/engine/output.c index eaaee434b..2d9f41382 100644 --- a/src/engine/output.c +++ b/src/engine/output.c @@ -8,6 +8,7 @@ #include "output.h" #include +#include #define bjam_out (stdout) @@ -24,13 +25,87 @@ static void out_( char const * data, FILE * const io ) } +void out_flush() +{ + fflush( bjam_out ); + if ( globs.out ) fflush( globs.out ); +} +void err_flush() +{ + fflush( bjam_err ); + if ( globs.out ) fflush( globs.out ); +} +void out_puts(char const * const s) +{ + fputs( s, bjam_out ); + if ( globs.out ) fputs( s, globs.out ); +} +void err_puts(char const * const s) +{ + fputs( s, bjam_err ); + if ( globs.out ) fputs( s, globs.out ); +} +void out_putc(const char c) +{ + fputc( c, bjam_out ); + if ( globs.out ) fputc( c, globs.out ); +} +void err_putc(const char c) +{ + fputc( c, bjam_err ); + if ( globs.out ) fputc( c, globs.out ); +} +void out_data(char const * const s) +{ + out_( s, bjam_out ); + if ( globs.out ) out_( s, globs.out ); +} +void err_data(char const * const s) +{ + out_( s, bjam_err ); + if ( globs.out ) out_( s, globs.out ); +} +void out_printf(char const * const f, ...) +{ + { + va_list args; + va_start( args, f ); + vfprintf( bjam_out, f, args ); + va_end( args ); + } + if ( globs.out ) + { + va_list args; + va_start( args, f ); + vfprintf( globs.out, f, args ); + va_end( args ); + } +} +void err_printf(char const * const f, ...) 
+{ + { + va_list args; + va_start( args, f ); + vfprintf( bjam_err, f, args ); + va_end( args ); + } + if ( globs.out ) + { + va_list args; + va_start( args, f ); + vfprintf( globs.out, f, args ); + va_end( args ); + } +} + + void out_action ( char const * const action, char const * const target, char const * const command, - char const * const out_data, - char const * const err_data, + char const * const out_d, + char const * const err_d, int const exit_reason ) { @@ -38,41 +113,36 @@ void out_action * should be null. */ if ( action ) - fprintf( bjam_out, "%s %s\n", action, target ); + out_printf( "%s %s\n", action, target ); /* Print out the command executed if given -d+2. */ if ( DEBUG_EXEC ) { - fputs( command, bjam_out ); - fputc( '\n', bjam_out ); + out_puts( command ); + out_putc( '\n' ); } - /* Print out the command executed to the command stream. */ - if ( globs.cmdout ) - fputs( command, globs.cmdout ); - /* If the process expired, make user aware with an explicit message, but do * this only for non-quiet actions. */ if ( exit_reason == EXIT_TIMEOUT && action ) - fprintf( bjam_out, "%ld second time limit exceeded\n", globs.timeout ); + out_printf( "%ld second time limit exceeded\n", globs.timeout ); /* Print out the command output, if requested, or if the program failed, but * only output for non-quiet actions. */ if ( action || exit_reason != EXIT_OK ) { - if ( out_data && + if ( out_d && ( ( globs.pipe_action & 1 /* STDOUT_FILENO */ ) || ( globs.pipe_action == 0 ) ) ) - out_( out_data, bjam_out ); - if ( err_data && ( globs.pipe_action & 2 /* STDERR_FILENO */ ) ) - out_( err_data, bjam_err ); + out_data( out_d ); + if ( err_d && ( globs.pipe_action & 2 /* STDERR_FILENO */ ) ) + err_data( err_d ); } - fflush( bjam_out ); - fflush( bjam_err ); - fflush( globs.cmdout ); + out_flush(); + err_flush(); } diff --git a/src/engine/output.h b/src/engine/output.h index 186e867f6..b6a98ff79 100644 --- a/src/engine/output.h +++ b/src/engine/output.h @@ -23,6 +23,17 @@ void out_action( int const exit_reason ); +void out_flush(); +void err_flush(); +void out_puts(char const * const s); +void err_puts(char const * const s); +void out_putc(const char c); +void err_putc(const char c); +void out_data(char const * const s); +void err_data(char const * const s); +void out_printf(char const * const f, ...); +void err_printf(char const * const f, ...); + OBJECT * outf_int( int const value ); OBJECT * outf_double( double const value ); OBJECT * outf_time( timestamp const * const value ); diff --git a/src/engine/parse.h b/src/engine/parse.h index ed9f067b7..689d78e8c 100644 --- a/src/engine/parse.h +++ b/src/engine/parse.h @@ -41,6 +41,9 @@ #define PARSE_SETTINGS 16 #define PARSE_SWITCH 17 #define PARSE_WHILE 18 +#define PARSE_RETURN 19 +#define PARSE_BREAK 20 +#define PARSE_CONTINUE 21 /* diff --git a/src/engine/patchlevel.h b/src/engine/patchlevel.h index 4da43e1ff..0f345ea94 100644 --- a/src/engine/patchlevel.h +++ b/src/engine/patchlevel.h @@ -7,11 +7,11 @@ /* Keep JAMVERSYM in sync with VERSION. */ /* It can be accessed as $(JAMVERSION) in the Jamfile. 
*/ -#define VERSION_MAJOR 2014 -#define VERSION_MINOR 3 +#define VERSION_MAJOR 2015 +#define VERSION_MINOR 7 #define VERSION_PATCH 0 -#define VERSION_MAJOR_SYM "2014" -#define VERSION_MINOR_SYM "03" +#define VERSION_MAJOR_SYM "2015" +#define VERSION_MINOR_SYM "07" #define VERSION_PATCH_SYM "00" -#define VERSION "2014.03" -#define JAMVERSYM "JAMVERSION=2014.03" +#define VERSION "2015.07" +#define JAMVERSYM "JAMVERSION=2015.07" diff --git a/src/engine/pathnt.c b/src/engine/pathnt.c index 412f5f4b5..077dc3b1b 100644 --- a/src/engine/pathnt.c +++ b/src/engine/pathnt.c @@ -23,6 +23,15 @@ #define WIN32_LEAN_AND_MEAN #include +#ifdef OS_CYGWIN +# include +# include +# ifdef CYGWIN_VERSION_CYGWIN_CONV +# include +# endif +# include +#endif + #include #include @@ -278,6 +287,97 @@ static path_key_entry * path_key( OBJECT * const path, } +/* + * translate_path_cyg2win() - conversion of a cygwin to a Windows path. + * + * FIXME: skip grist + */ + +#ifdef OS_CYGWIN +static int translate_path_cyg2win( string * path ) +{ + int translated = 0; + +#ifdef CYGWIN_VERSION_CYGWIN_CONV + /* Use new Cygwin API added with Cygwin 1.7. Old one had no error + * handling and has been deprecated. + */ + char * dynamicBuffer = 0; + char buffer[ MAX_PATH + 1001 ]; + char const * result = buffer; + cygwin_conv_path_t const conv_type = CCP_POSIX_TO_WIN_A | CCP_RELATIVE; + ssize_t const apiResult = cygwin_conv_path( conv_type, path->value, + buffer, sizeof( buffer ) / sizeof( *buffer ) ); + assert( apiResult == 0 || apiResult == -1 ); + assert( apiResult || strlen( result ) < sizeof( buffer ) / sizeof( + *buffer ) ); + if ( apiResult ) + { + result = 0; + if ( errno == ENOSPC ) + { + ssize_t const size = cygwin_conv_path( conv_type, path->value, + NULL, 0 ); + assert( size >= -1 ); + if ( size > 0 ) + { + dynamicBuffer = (char *)BJAM_MALLOC_ATOMIC( size ); + if ( dynamicBuffer ) + { + ssize_t const apiResult = cygwin_conv_path( conv_type, + path->value, dynamicBuffer, size ); + assert( apiResult == 0 || apiResult == -1 ); + if ( !apiResult ) + { + result = dynamicBuffer; + assert( strlen( result ) < size ); + } + } + } + } + } +#else /* CYGWIN_VERSION_CYGWIN_CONV */ + /* Use old Cygwin API deprecated with Cygwin 1.7. */ + char result[ MAX_PATH + 1 ]; + cygwin_conv_to_win32_path( path->value, result ); + assert( strlen( result ) <= MAX_PATH ); +#endif /* CYGWIN_VERSION_CYGWIN_CONV */ + + if ( result ) + { + string_copy( path, result ); + translated = 1; + } + +#ifdef CYGWIN_VERSION_CYGWIN_CONV + if ( dynamicBuffer ) + BJAM_FREE( dynamicBuffer ); +#endif + + return translated; +} +#endif /* OS_CYGWIN */ + + +/* + * path_translate_to_os_() + */ + +int path_translate_to_os_( char const * f, string * file ) +{ + int translated = 0; + + /* by default, pass on the original path */ + string_copy( file, f ); + +#ifdef OS_CYGWIN + translated = translate_path_cyg2win( file ); +#endif + + return translated; +} + + void path_register_key( OBJECT * canonic_path ) { path_key( canonic_path, 1 ); diff --git a/src/engine/pathsys.c b/src/engine/pathsys.c index ae4e6e052..26ec99171 100644 --- a/src/engine/pathsys.c +++ b/src/engine/pathsys.c @@ -42,6 +42,7 @@ */ unsigned long path_get_process_id_( void ); void path_get_temp_path_( string * buffer ); +int path_translate_to_os_( char const * f, string * file ); /* @@ -164,17 +165,21 @@ static char as_path_delim( char const c ) void path_build( PATHNAME * f, string * file ) { + int check_f; + int check_f_pos; + file_build1( f, file ); /* Do not prepend root if it is '.' 
or the directory is rooted. */ - if ( f->f_root.len - && !( f->f_root.len == 1 && f->f_root.ptr[ 0 ] == '.' ) - && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '/' ) + check_f = (f->f_root.len + && !( f->f_root.len == 1 && f->f_root.ptr[ 0 ] == '.') + && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '/' )); #if PATH_DELIM == '\\' - && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '\\' ) - && !( f->f_dir.len && f->f_dir.ptr[ 1 ] == ':' ) + check_f = (check_f + && !( f->f_dir.len && f->f_dir.ptr[ 0 ] == '\\' ) + && !( f->f_dir.len && f->f_dir.ptr[ 1 ] == ':' )); #endif - ) + if (check_f) { string_append_range( file, f->f_root.ptr, f->f_root.ptr + f->f_root.len ); @@ -190,11 +195,12 @@ void path_build( PATHNAME * f, string * file ) /* Put path separator between dir and file. */ /* Special case for root dir: do not add another path separator. */ - if ( f->f_dir.len && ( f->f_base.len || f->f_suffix.len ) + check_f_pos = (f->f_dir.len && ( f->f_base.len || f->f_suffix.len )); #if PATH_DELIM == '\\' - && !( f->f_dir.len == 3 && f->f_dir.ptr[ 1 ] == ':' ) + check_f_pos = (check_f_pos && !( f->f_dir.len == 3 && f->f_dir.ptr[ 1 ] == ':' )); #endif - && !( f->f_dir.len == 1 && is_path_delim( f->f_dir.ptr[ 0 ] ) ) ) + check_f_pos = (check_f_pos && !( f->f_dir.len == 1 && is_path_delim( f->f_dir.ptr[ 0 ]))); + if (check_f_pos) string_push_back( file, as_path_delim( f->f_dir.ptr[ f->f_dir.len ] ) ); if ( f->f_base.len ) @@ -283,3 +289,14 @@ OBJECT * path_tmpfile( void ) return result; } + + +/* + * path_translate_to_os() - translate filename to OS-native path + * + */ + +int path_translate_to_os( char const * f, string * file ) +{ + return path_translate_to_os_( f, file ); +} diff --git a/src/engine/pathsys.h b/src/engine/pathsys.h index 9b7a4caf1..9a99680b4 100644 --- a/src/engine/pathsys.h +++ b/src/engine/pathsys.h @@ -47,6 +47,7 @@ typedef struct _pathname void path_build( PATHNAME *, string * file ); void path_parse( char const * file, PATHNAME * ); void path_parent( PATHNAME * ); +int path_translate_to_os( char const *, string * file ); /* Given a path, returns an object containing an equivalent path in canonical * format that can be used as a unique key for that path. Equivalent paths such diff --git a/src/engine/pathunix.c b/src/engine/pathunix.c index 8ca0d185e..2b2347c87 100644 --- a/src/engine/pathunix.c +++ b/src/engine/pathunix.c @@ -43,6 +43,21 @@ void path_get_temp_path_( string * buffer ) } +/* + * path_translate_to_os_() + */ + +int path_translate_to_os_( char const * f, string * file ) +{ + int translated = 0; + + /* by default, pass on the original path */ + string_copy( file, f ); + + return translated; +} + + /* * path_register_key() */ diff --git a/src/engine/pathvms.c b/src/engine/pathvms.c new file mode 100644 index 000000000..01f3f90d9 --- /dev/null +++ b/src/engine/pathvms.c @@ -0,0 +1,244 @@ +/* + * Copyright 1993-2002 Christopher Seiwald and Perforce Software, Inc. + * + * This file is part of Jam - see jam.c for Copyright information. + */ + +/* This file is ALSO: + * Copyright 2001-2004 David Abrahams. + * Copyright 2005 Rene Rivera. + * Copyright 2015 Artur Shepilko. + * Distributed under the Boost Software License, Version 1.0. + * (See accompanying file LICENSE_1_0.txt or copy at + * http://www.boost.org/LICENSE_1_0.txt) + */ + + +/* + * pathvms.c - VMS-specific path manipulation support + * + * This implementation is based on POSIX-style path manipulation. 
+ * + * VMS CTRL directly supports both POSIX- and native VMS-style path expressions, + * with the POSIX-to-VMS path translation performed internally by the same + * set of functions. For the most part such processing is transparent, with + * few differences mainly related to file-versions (in POSIX mode only the recent + * version is visible). + * + * This should allow us to some extent re-use pathunix.c implementation. + * + * Thus in jam-files the path references can also remain POSIX/UNIX-like on all + * levels EXCEPT in actions scope, where the path references must be translated + * to the native VMS-style. This approach is somewhat similar to jam CYGWIN + * handling. + * + * + * External routines: + * path_register_key() + * path_as_key() + * path_done() + * + * External routines called only via routines in pathsys.c: + * path_get_process_id_() + * path_get_temp_path_() + * path_translate_to_os_() + */ + + +#include "jam.h" + +#ifdef OS_VMS + +#include "pathsys.h" + +#include +#include +#include /* needed for getpid() */ +#include /* needed for decc$to_vms() */ + + +/* + * path_get_process_id_() + */ + +unsigned long path_get_process_id_( void ) +{ + return getpid(); +} + + +/* + * path_get_temp_path_() + */ + +void path_get_temp_path_( string * buffer ) +{ + char const * t = getenv( "TMPDIR" ); + string_append( buffer, t ? t : "/tmp" ); +} + + +/* + * translate_path_posix2vms() + * + * POSIX-to-VMS file specification translation: + * + * Translation is performed with decc$to_vms() CTRL routine (default decc$features) + * Some limitations apply: + * -- ODS-2 compliant file specs only (no spaces, punctuation chars etc.) + * + * -- wild-cards are not allowed + * In general decc$to_vms() can expand the wildcard for existing files, + * yet it cannot retain wild-cards in translated spec. Use GLOB for this. + * + * -- rooted path must refer to an existing/defined device or root-dir + * (e.g. /defconcealed/dir/file.ext or /existingrootdir/dir/file.ext ) + * + * -- POSIX dir/no-type-file path ambiguity (e.g. dir/newsubdir vs. dir/newfile + * is handled as follows: + * + * 1) first try as directory: + * -- if translated (may be a dir): means the file-path has no .type/suffix + * -- if not translated, then it may be a file (has .type) OR invalid spec + * 2) then try as file: + * -- if translated and also is a dir -- check if such file exists (stat) + * -- if not translated, but is a dir -- return as dir + * + * NOTE: on VMS it's possible to have both a file and a dir of the same name + * appear in the same directory. In such case _directory_ intent is assumed. + * + * It's preferrable to avoid such naming ambiguity in this context, so + * append an empty .type to specify a no-type file (eg. 
"filename.") + * + */ + + +static string * m_vmsfilespec = NULL; + +/* + * copy_vmsfilespec() - decc$to_vms action routine for matched filenames + */ + +static int copy_vmsfilespec( char * f, int type ) +{ + assert ( NULL != m_vmsfilespec && "Must be bound to a valid object" ); + + string_copy( m_vmsfilespec, f ); + + /* 0:Exit on first match (1:Process all) */ + return 0; +} + + +static int translate_path_posix2vms( string * path ) +{ + int translated = 0; + + string as_dir[ 1 ]; + string as_file[ 1 ]; + int dir_count; + int file_count; + + unsigned char is_dir; + unsigned char is_file; + unsigned char is_ambiguous; + + string_new( as_dir ); + string_new( as_file ); + + + m_vmsfilespec = as_dir; + + /* MATCH 0:do not allow wildcards, 0:allow directories (2:dir only) */ + dir_count = decc$to_vms( path->value, copy_vmsfilespec, 0, 2 ); + + + m_vmsfilespec = as_file; + + /* MATCH 0:do not allow wildcards, 0:allow directories (2:dir only) */ + file_count = decc$to_vms( path->value, copy_vmsfilespec, 0, 0 ); + + m_vmsfilespec = NULL; + + + translated = ( file_count || dir_count ); + + if ( file_count && dir_count ) + { + struct stat statbuf; + + /* use as_file only when exists AND as_dir does not exist + * otherwise use as_dir + */ + if ( stat(as_dir->value, &statbuf ) < 0 + && stat(as_file->value, &statbuf ) > 0 + && ( statbuf.st_mode & S_IFREG ) ) + { + string_copy( path, as_file->value ); + } + else + { + string_copy( path, as_dir->value ); + } + } + else if ( file_count ) { string_copy( path, as_file->value ); } + else if ( dir_count ) { string_copy( path, as_dir->value ); } + else + { + /* error: unable to translate path to native format */ + translated = 0; + } + + string_free( as_dir ); + string_free( as_file ); + + return translated; +} + + +/* + * path_translate_to_os_() + */ + +int path_translate_to_os_( char const * f, string * file ) +{ + int translated = 0; + + /* by default, pass on the original path */ + string_copy( file, f ); + + translated = translate_path_posix2vms( file ); + + return translated; +} + + +/* + * path_register_key() + */ + +void path_register_key( OBJECT * path ) +{ +} + + +/* + * path_as_key() + */ + +OBJECT * path_as_key( OBJECT * path ) +{ + return object_copy( path ); +} + + +/* + * path_done() + */ + +void path_done( void ) +{ +} + +#endif + diff --git a/src/engine/regexp.c b/src/engine/regexp.c index c64201b90..80084e25c 100644 --- a/src/engine/regexp.c +++ b/src/engine/regexp.c @@ -45,6 +45,7 @@ #include "jam.h" #include "regexp.h" +#include "output.h" #include #include @@ -907,12 +908,12 @@ regmatch( char * prog ) scan = prog; #ifdef DEBUG if (scan != NULL && regnarrate) - fprintf(stderr, "%s(\n", regprop(scan)); + err_printf("%s(\n", regprop(scan)); #endif while (scan != NULL) { #ifdef DEBUG if (regnarrate) - fprintf(stderr, "%s...\n", regprop(scan)); + err_printf("%s...\n", regprop(scan)); #endif next = regnext(scan); @@ -1180,32 +1181,32 @@ regdump( regexp *r ) s = r->program + 1; while (op != END) { /* While that wasn't END last time... */ op = OP(s); - printf("%2d%s", s-r->program, regprop(s)); /* Where, what. */ + out_printf("%2d%s", s-r->program, regprop(s)); /* Where, what. */ next = regnext(s); if (next == NULL) /* Next ptr. */ - printf("(0)"); + out_printf("(0)"); else - printf("(%d)", (s-r->program)+(next-s)); + out_printf("(%d)", (s-r->program)+(next-s)); s += 3; if (op == ANYOF || op == ANYBUT || op == EXACTLY) { /* Literal string, where present. 
*/ while (*s != '\0') { - putchar(*s); + out_putc(*s); s++; } s++; } - putchar('\n'); + out_putc('\n'); } /* Header fields of interest. */ if (r->regstart != '\0') - printf("start `%c' ", r->regstart); + out_printf("start `%c' ", r->regstart); if (r->reganch) - printf("anchored "); + out_printf("anchored "); if (r->regmust != NULL) - printf("must have \"%s\"", r->regmust); - printf("\n"); + out_printf("must have \"%s\"", r->regmust); + out_printf("\n"); } /* diff --git a/src/engine/rules.c b/src/engine/rules.c index 7947c5507..eb35aa6df 100644 --- a/src/engine/rules.c +++ b/src/engine/rules.c @@ -63,7 +63,6 @@ static TARGET * get_target_includes( TARGET * const t ) i->name = object_copy( t->name ); i->boundname = object_copy( i->name ); i->flags |= T_FLAG_NOTFILE | T_FLAG_INTERNAL; - i->original_target = t; t->includes = i; } return t->includes; diff --git a/src/engine/rules.h b/src/engine/rules.h index f3a020bb8..d118b3149 100644 --- a/src/engine/rules.h +++ b/src/engine/rules.h @@ -125,6 +125,16 @@ struct _target ACTIONS * actions; /* rules to execute, if any */ SETTINGS * settings; /* variables to define */ + TARGETS * depends; /* dependencies */ + TARGETS * dependants; /* the inverse of dependencies */ + TARGETS * rebuilds; /* targets that should be force-rebuilt + * whenever this one is + */ + TARGET * includes; /* internal includes node */ + + timestamp time; /* update time */ + timestamp leaf; /* update time of leaf sources */ + short flags; /* status info */ #define T_FLAG_TEMP 0x0001 /* TEMPORARY applied */ @@ -164,18 +174,6 @@ struct _target #define T_BIND_PARENTS 2 /* using parent's timestamp */ #define T_BIND_EXISTS 3 /* real file, timestamp valid */ - TARGETS * depends; /* dependencies */ - TARGETS * dependants; /* the inverse of dependencies */ - TARGETS * rebuilds; /* targets that should be force-rebuilt - * whenever this one is - */ - TARGET * includes; /* internal includes node */ - TARGET * original_target; /* original_target->includes = this */ - char rescanned; - - timestamp time; /* update time */ - timestamp leaf; /* update time of leaf sources */ - char fate; /* make0()'s diagnosis */ #define T_FATE_INIT 0 /* nothing done to target */ @@ -212,12 +210,12 @@ struct _target #define T_MAKE_SEMAPHORE 5 /* Special target type for semaphores */ #endif + char status; /* exec_cmd() result */ + #ifdef OPT_SEMAPHORE TARGET * semaphore; /* used in serialization */ #endif - char status; /* exec_cmd() result */ - int asynccnt; /* child deps outstanding */ TARGETS * parents; /* used by make1() for completion */ TARGET * scc_root; /* used by make to resolve cyclic includes diff --git a/src/engine/scan.c b/src/engine/scan.c index fb0ad82a4..ab858b0ee 100644 --- a/src/engine/scan.c +++ b/src/engine/scan.c @@ -11,6 +11,7 @@ #include "jam.h" #include "scan.h" +#include "output.h" #include "constants.h" #include "jambase.h" @@ -74,7 +75,7 @@ void yyerror( char const * s ) * will hold the information about where the token started while incp will * hold the information about where reading it broke. 
*/ - printf( "%s:%d: %s at %s\n", object_str( yylval.file ), yylval.line, s, + out_printf( "%s:%d: %s at %s\n", object_str( yylval.file ), yylval.line, s, symdump( &yylval ) ); ++anyerrors; } @@ -370,7 +371,7 @@ int yylex() } if ( DEBUG_SCAN ) - printf( "scan %s\n", symdump( &yylval ) ); + out_printf( "scan %s\n", symdump( &yylval ) ); return yylval.type; diff --git a/src/engine/search.c b/src/engine/search.c index b2beadaaa..e21ae3687 100644 --- a/src/engine/search.c +++ b/src/engine/search.c @@ -23,6 +23,7 @@ #include "strings.h" #include "timestamp.h" #include "variable.h" +#include "output.h" #include @@ -102,7 +103,7 @@ void set_explicit_binding( OBJECT * target, OBJECT * locate ) path_build( f, buf ); boundname = object_new( buf->value ); if ( DEBUG_SEARCH ) - printf( "explicit locate %s: %s\n", object_str( target ), buf->value ); + out_printf( "explicit locate %s: %s\n", object_str( target ), buf->value ); string_free( buf ); key = path_as_key( boundname ); object_free( boundname ); @@ -168,7 +169,7 @@ OBJECT * search( OBJECT * target, timestamp * const time, path_build( f, buf ); if ( DEBUG_SEARCH ) - printf( "locate %s: %s\n", object_str( target ), buf->value ); + out_printf( "locate %s: %s\n", object_str( target ), buf->value ); key = object_new( buf->value ); timestamp_from_path( time, key ); @@ -194,7 +195,7 @@ OBJECT * search( OBJECT * target, timestamp * const time, path_build( f, buf ); if ( DEBUG_SEARCH ) - printf( "search %s: %s\n", object_str( target ), buf->value ); + out_printf( "search %s: %s\n", object_str( target ), buf->value ); test_path = object_new( buf->value ); key = path_as_key( test_path ); @@ -205,7 +206,7 @@ OBJECT * search( OBJECT * target, timestamp * const time, if ( ( ba = (BINDING *)hash_find( explicit_bindings, key ) ) ) { if ( DEBUG_SEARCH ) - printf(" search %s: found explicitly located target %s\n", + out_printf(" search %s: found explicitly located target %s\n", object_str( target ), object_str( ba->target ) ); if ( another_target ) *another_target = ba->target; @@ -241,7 +242,7 @@ OBJECT * search( OBJECT * target, timestamp * const time, path_build( f, buf ); if ( DEBUG_SEARCH ) - printf( "search %s: %s\n", object_str( target ), buf->value ); + out_printf( "search %s: %s\n", object_str( target ), buf->value ); key = object_new( buf->value ); timestamp_from_path( time, key ); diff --git a/src/engine/timestamp.c b/src/engine/timestamp.c index 0d016985e..17510bcd0 100644 --- a/src/engine/timestamp.c +++ b/src/engine/timestamp.c @@ -32,6 +32,7 @@ #include "object.h" #include "pathsys.h" #include "strings.h" +#include "output.h" /* @@ -231,7 +232,7 @@ static void time_enter( void * closure, OBJECT * target, int const found, b->progress = found ? 
BIND_FOUND : BIND_SPOTTED; if ( DEBUG_BINDSCAN ) - printf( "time ( %s ) : %s\n", object_str( target ), time_progress[ + out_printf( "time ( %s ) : %s\n", object_str( target ), time_progress[ b->progress ] ); object_free( target ); diff --git a/src/engine/variable.c b/src/engine/variable.c index 2c292fbc8..ea99d687c 100644 --- a/src/engine/variable.c +++ b/src/engine/variable.c @@ -39,6 +39,7 @@ #include "parse.h" #include "pathsys.h" #include "strings.h" +#include "output.h" #include #include @@ -211,6 +212,53 @@ LIST * var_get( struct module_t * module, OBJECT * symbol ) var_dump( v->symbol, v->value, "get" ); result = v->value; } + +#ifdef OS_VMS + else if ( ( module->name && object_equal( module->name, constant_ENVIRON ) ) + || root_module() == module ) + { + /* On VMS, when a variable from root or ENVIRON module is not found, + * explicitly request it from the process. + * By design, process variables (and logicals) are not made available + * to C main(), and thus will not get loaded in bulk to root/ENVRON. + * So we get around it by getting any such variable on first request. + */ + const char * val = getenv( object_str( symbol ) ); + + if ( val ) + { + struct module_t * environ_module = module; + char * environ[ 2 ] = { 0 }; /* NULL-terminated */ + string buf[ 1 ]; + + if ( root_module() == module ) + { + environ_module = bindmodule( constant_ENVIRON ); + } + + string_copy( buf, object_str( symbol ) ); + string_append( buf, "=" ); + string_append( buf, val ); + + environ[ 0 ] = buf->value; + + /* Load variable to global module, with splitting, for backward + * compatibility. Then to .ENVIRON, without splitting. + */ + var_defines( root_module(), environ, 1 ); + var_defines( environ_module, environ, 0 ); + string_free( buf ); + + if ( module->variables && ( v = (VARIABLE *)hash_find( + module->variables, symbol ) ) ) + { + if ( DEBUG_VARGET ) + var_dump( v->symbol, v->value, "get" ); + result = v->value; + } + } + } +#endif } return result; } @@ -319,9 +367,9 @@ static LIST * * var_enter( struct module_t * module, OBJECT * symbol ) static void var_dump( OBJECT * symbol, LIST * value, char * what ) { - printf( "%s %s = ", what, object_str( symbol ) ); + out_printf( "%s %s = ", what, object_str( symbol ) ); list_print( value ); - printf( "\n" ); + out_printf( "\n" ); } diff --git a/src/kernel/bootstrap.py b/src/kernel/bootstrap.py index 2e8dd37b7..3746b5e66 100644 --- a/src/kernel/bootstrap.py +++ b/src/kernel/bootstrap.py @@ -1,6 +1,6 @@ -# Copyright 2009 Vladimir Prus +# Copyright 2009 Vladimir Prus # -# Distributed under the Boost Software License, Version 1.0. +# Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import imp diff --git a/src/kernel/modules.jam b/src/kernel/modules.jam index 425822532..251bdf191 100644 --- a/src/kernel/modules.jam +++ b/src/kernel/modules.jam @@ -144,7 +144,7 @@ rule load ( filename ?= $(module-name).jam ; # Mark the module loaded so we do not try to load it recursively. - .loaded += $(module-name) ; + .loaded += $(module-name:B) ; # Suppress tests if any module loads are already in progress. local suppress-test = $(.loading[1]) ; @@ -263,7 +263,8 @@ rule import ( module-names + : rules-opt * : rename-opt * ) # Import each specified module for local m in $(module-names) { - if ! $(m) in $(.loaded) + local module-name = $(m:B) ; + if ! $(module-name) in $(.loaded) { # If the importing module is not already in the BOOST_BUILD_PATH, # prepend it to the path. 
We do not want to invert the search order @@ -285,18 +286,23 @@ rule import ( module-names + : rules-opt * : rename-opt * ) { search = $(caller-location) $(search) ; } + + if $(m:D) + { + search = $(caller-location)/$(m:D) $(search)/$(m:D) $(search) ; + } - load $(m) : : $(search) ; + load $(module-name) : : $(search) ; } - IMPORT_MODULE $(m) : $(caller) ; + IMPORT_MODULE $(module-name) : $(caller) ; if $(rules-opt) { local source-names ; if $(rules-opt) = * { - local all-rules = [ RULENAMES $(m) ] ; + local all-rules = [ RULENAMES $(module-name) ] ; source-names = $(all-rules) ; } else @@ -305,7 +311,7 @@ rule import ( module-names + : rules-opt * : rename-opt * ) } local target-names = $(rename-opt) ; target-names ?= $(source-names) ; - IMPORT $(m) : $(source-names) : $(caller) : $(target-names) ; + IMPORT $(module-name) : $(source-names) : $(caller) : $(target-names) ; } } } diff --git a/src/manager.py b/src/manager.py index 473857fc7..9c1e05733 100644 --- a/src/manager.py +++ b/src/manager.py @@ -14,7 +14,7 @@ class Manager: """ This class is a facade to the Boost.Build system. It serves as the root to access all data structures in use. """ - + def __init__ (self, engine, global_build_dir): """ Constructor. engine: the build engine that will actually construct the targets. @@ -26,7 +26,7 @@ class Manager: from build.errors import Errors from b2.util.logger import NullLogger from build import build_request, property_set, feature - + self.engine_ = engine self.virtual_targets_ = VirtualTargetRegistry (self) self.projects_ = ProjectRegistry (self, global_build_dir) @@ -37,16 +37,16 @@ class Manager: self.boost_build_path_ = bjam.variable("BOOST_BUILD_PATH") self.errors_ = Errors() self.command_line_free_features_ = property_set.empty() - + global the_manager the_manager = self - + def scanners (self): return self.scanners_ def engine (self): return self.engine_ - + def virtual_targets (self): return self.virtual_targets_ @@ -58,7 +58,7 @@ class Manager: def argv (self): return self.argv_ - + def logger (self): return self.logger_ @@ -88,7 +88,7 @@ class Manager: if not targets: for name, project in self.projects ().projects (): targets.append (project.target ()) - + property_groups = build_request.expand_no_defaults (properties) virtual_targets = [] @@ -107,4 +107,4 @@ class Manager: actual_targets = [] for virtual_target in virtual_targets: actual_targets.extend (virtual_target.actualize ()) - + diff --git a/src/tools/boostbook.jam b/src/tools/boostbook.jam index 42342d9bb..13d55fc2f 100644 --- a/src/tools/boostbook.jam +++ b/src/tools/boostbook.jam @@ -54,8 +54,8 @@ project boostbook ; ] ] ; feature.feature format - : html xhtml htmlhelp onehtml man pdf ps docbook fo tests - : incidental implicit composite propagated ; + : html xhtml htmlhelp onehtml man pdf ps docbook fo tests none + : implicit composite propagated ; type.register DTDXML : dtdxml ; type.register XML : xml ; @@ -643,6 +643,7 @@ class boostbook-target-class : basic-target import generators ; import property-set ; import virtual-target ; + import path ; rule construct ( name : sources * : property-set ) { @@ -690,6 +691,11 @@ class boostbook-target-class : basic-target target = $(target[2]) ; local name = [ $(property-set).get ] ; name ?= $(format) ; + if ! 
[ path.is-rooted $(name) ] + { + local p = [ project ] ; + name = [ path.join [ $(p).location ] $(name) ] ; + } $(target).set-path $(name) ; } else diff --git a/src/tools/builtin.jam b/src/tools/builtin.jam index 92959afc0..f39fb7018 100644 --- a/src/tools/builtin.jam +++ b/src/tools/builtin.jam @@ -41,7 +41,7 @@ import generate ; .os-names = aix android bsd cygwin darwin freebsd haiku hpux iphone linux netbsd - openbsd osf qnx qnxnto sgi solaris unix unixware windows + openbsd osf qnx qnxnto sgi solaris unix unixware windows vms elf # Not actually an OS -- used for targeting bare metal where object # format is ELF. This catches both -elf and -eabi gcc targets and well # as other compilers targeting ELF. It is not clear how often we need @@ -80,6 +80,7 @@ local rule default-host-os ( ) case MACOSX : host-os = darwin ; case KFREEBSD : host-os = freebsd ; case LINUX : host-os = linux ; + case VMS : host-os = vms ; case SUNOS : ECHO "SunOS is not a supported operating system." ; ECHO "We believe last version of SunOS was released in 1992, " ; @@ -140,6 +141,8 @@ feature.feature asmflags : : free ; feature.feature linkflags : : free ; feature.feature archiveflags : : free ; feature.feature version : : free ; +feature.feature mflags : : free ; +feature.feature mmflags : : free ; # Generic, i.e. non-language specific, flags for tools. feature.feature flags : : free ; diff --git a/src/tools/builtin.py b/src/tools/builtin.py index 14a883e1b..a149a3332 100644 --- a/src/tools/builtin.py +++ b/src/tools/builtin.py @@ -13,7 +13,7 @@ import b2.build.targets as targets import sys from b2.build import feature, property, virtual_target, generators, type, property_set, scanner from b2.util.utility import * -from b2.util import path, regex, bjam_signature +from b2.util import path, regex, bjam_signature, is_iterable_typed import b2.tools.types from b2.manager import get_manager @@ -36,15 +36,15 @@ def variant (name, parents_or_properties, explicit_properties = []): refining parents' explicit properties with the passed explicit properties. The result is remembered and will be used if this variant is used as parent. - + Second, determines the full property set for this variant by - adding to the explicit properties default values for all properties + adding to the explicit properties default values for all properties which neither present nor are symmetric. - + Lastly, makes appropriate value of 'variant' property expand to the full property set. name: Name of the variant - parents_or_properties: Specifies parent variants, if + parents_or_properties: Specifies parent variants, if 'explicit_properties' are given, and explicit_properties otherwise. explicit_properties: Explicit properties. @@ -54,7 +54,7 @@ def variant (name, parents_or_properties, explicit_properties = []): explicit_properties = parents_or_properties else: parents = parents_or_properties - + inherited = property_set.empty() if parents: @@ -62,22 +62,22 @@ def variant (name, parents_or_properties, explicit_properties = []): # between base variants, and there was no demand for so to bother. 
if len (parents) > 1: raise BaseException ("Multiple base variants are not yet supported") - + p = parents[0] # TODO: the check may be stricter if not feature.is_implicit_value (p): raise BaseException ("Invalid base variant '%s'" % p) - + inherited = __variant_explicit_properties[p] explicit_properties = property_set.create_with_validation(explicit_properties) explicit_properties = inherited.refine(explicit_properties) - + # Record explicitly specified properties for this variant # We do this after inheriting parents' properties, so that # they affect other variants, derived from this one. __variant_explicit_properties[name] = explicit_properties - + feature.extend('variant', [name]) feature.compose ("" + name, explicit_properties.all()) @@ -128,20 +128,20 @@ def register_globals (): feature.feature('target-os', __os_names, ['propagated', 'link-incompatible']) feature.set_default('target-os', default_host_os()) - + feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric']) - + feature.feature ('stdlib', ['native'], ['propagated', 'composite']) - + feature.feature ('link', ['shared', 'static'], ['propagated']) feature.feature ('runtime-link', ['shared', 'static'], ['propagated']) feature.feature ('runtime-debugging', ['on', 'off'], ['propagated']) - - + + feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated']) feature.feature ('profiling', ['off', 'on'], ['propagated']) feature.feature ('inlining', ['off', 'on', 'full'], ['propagated']) - + feature.feature ('threading', ['single', 'multi'], ['propagated']) feature.feature ('rtti', ['on', 'off'], ['propagated']) feature.feature ('exception-handling', ['on', 'off'], ['propagated']) @@ -162,21 +162,21 @@ def register_globals (): feature.feature ('linkflags', [], ['free']) feature.feature ('archiveflags', [], ['free']) feature.feature ('version', [], ['free']) - + feature.feature ('location-prefix', [], ['free']) feature.feature ('action', [], ['free']) - + # The following features are incidental, since # in themself they have no effect on build products. # Not making them incidental will result in problems in corner # cases, for example: - # + # # unit-test a : a.cpp : b ; # lib b : a.cpp b ; - # - # Here, if is not incidental, we'll decide we have two + # + # Here, if is not incidental, we'll decide we have two # targets for a.obj with different properties, and will complain. # # Note that making feature incidental does not mean it's ignored. It may @@ -196,7 +196,7 @@ def register_globals (): 'off', # Do not fail the compilation if there are warnings. 'on'], # Fail the compilation if there are warnings. ['incidental', 'propagated']) - + feature.feature('c++-template-depth', [str(i) for i in range(64,1024+1,64)] + [str(i) for i in range(20,1000+1,10)] + @@ -213,31 +213,31 @@ def register_globals (): feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ; # Internal feature. feature.feature ('library-file', [], ['free', 'dependency']) - + feature.feature ('name', [], ['free']) feature.feature ('tag', [], ['free']) feature.feature ('search', [], ['free', 'path']) #order-sensitive ; feature.feature ('location', [], ['free', 'path']) - + feature.feature ('dll-path', [], ['free', 'path']) feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental']) - - + + # This is internal feature which holds the paths of all dependency # dynamic libraries. On Windows, it's needed so that we can all # those paths to PATH, when running applications. 
# On Linux, it's needed to add proper -rpath-link command line options. feature.feature ('xdll-path', [], ['free', 'path']) - + #provides means to specify def-file for windows dlls. feature.feature ('def-file', [], ['free', 'dependency']) - + # This feature is used to allow specific generators to run. # For example, QT tools can only be invoked when QT library # is used. In that case, qt will be in usage requirement # of the library. feature.feature ('allow', [], ['free']) - + # The addressing model to generate code for. Currently a limited set only # specifying the bit size of pointers. feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional']) @@ -261,7 +261,7 @@ def register_globals (): # HP/PA-RISC 'parisc', - + # Advanced RISC Machines 'arm', @@ -307,7 +307,7 @@ def register_globals (): # HP/PA-RISC '700', '7100', '7100lc', '7200', '7300', '8000', - + # Advanced RISC Machines 'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5', 'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'], @@ -318,17 +318,17 @@ def register_globals (): # The value of 'no' prevents building of a target. feature.feature('build', ['yes', 'no'], ['optional']) - + # Windows-specific features feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], []) feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric']) variant ('debug', ['off', 'on', 'off', 'on']) - variant ('release', ['speed', 'off', 'full', + variant ('release', ['speed', 'off', 'full', 'off', 'NDEBUG']) variant ('profile', ['release'], ['on', 'on']) - + reset () register_globals () @@ -336,19 +336,19 @@ register_globals () class SearchedLibTarget (virtual_target.AbstractFileTarget): def __init__ (self, name, project, shared, search, action): virtual_target.AbstractFileTarget.__init__ (self, name, 'SEARCHED_LIB', project, action) - + self.shared_ = shared self.search_ = search def shared (self): return self.shared_ - + def search (self): return self.search_ - + def actualize_location (self, target): bjam.call("NOTFILE", target) - + def path (self): #FIXME: several functions rely on this not being None return "" @@ -361,24 +361,24 @@ class CScanner (scanner.Scanner): self.includes_ = [] for i in includes: - self.includes_.extend(i.split("&&")) + self.includes_.extend(i.split("&&")) def pattern (self): return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")' def process (self, target, matches, binding): - + angle = regex.transform (matches, "<(.*)>") quoted = regex.transform (matches, '"(.*)"') g = str(id(self)) b = os.path.normpath(os.path.dirname(binding[0])) - + # Attach binding of including file to included targets. # When target is directly created from virtual target # this extra information is unnecessary. But in other - # cases, it allows to distinguish between two headers of the - # same name included from different places. + # cases, it allows to distinguish between two headers of the + # same name included from different places. # We don't need this extra information for angle includes, # since they should not depend on including file (we can't # get literal "." in include path). @@ -395,11 +395,11 @@ class CScanner (scanner.Scanner): engine = get_manager().engine() engine.set_target_variable(angle, "SEARCH", get_value(self.includes_)) engine.set_target_variable(quoted, "SEARCH", [b] + get_value(self.includes_)) - + # Just propagate current scanner to includes, in a hope - # that includes do not change scanners. + # that includes do not change scanners. 
get_manager().scanners().propagate(self, angle + quoted) - + scanner.register (CScanner, 'include') type.set_scanner ('CPP', CScanner) type.set_scanner ('C', CScanner) @@ -407,15 +407,18 @@ type.set_scanner ('C', CScanner) # Ported to trunk@47077 class LibGenerator (generators.Generator): """ The generator class for libraries (target type LIB). Depending on properties it will - request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or + request building of the approapriate specific type -- SHARED_LIB, STATIC_LIB or SHARED_LIB. """ def __init__(self, id, composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []): generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements) - - def run(self, project, name, prop_set, sources): + def run(self, project, name, prop_set, sources): + assert isinstance(project, targets.ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) # The lib generator is composing, and can be only invoked with # explicit name. This check is present in generator.run (and so in # builtin.LinkingGenerator), but duplicate it here to avoid doing @@ -429,7 +432,7 @@ class LibGenerator (generators.Generator): ('' in properties_grist or '' in properties_grist): actual_type = 'SEARCHED_LIB' elif '' in properties_grist: - # The generator for + # The generator for actual_type = 'LIB' elif 'shared' in properties: actual_type = 'SHARED_LIB' @@ -451,7 +454,11 @@ generators.override("builtin.prebuilt", "builtin.lib-generator") def lib(names, sources=[], requirements=[], default_build=[], usage_requirements=[]): """The implementation of the 'lib' rule. Beyond standard syntax that rule allows simplified: 'lib a b c ;'.""" - + assert is_iterable_typed(names, basestring) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert is_iterable_typed(default_build, basestring) + assert is_iterable_typed(usage_requirements, basestring) if len(names) > 1: if any(r.startswith('') for r in requirements): get_manager().errors()("When several names are given to the 'lib' rule\n" + @@ -490,8 +497,12 @@ class SearchedLibGenerator (generators.Generator): # is make sure SearchedLibGenerator is not invoked deep in transformation # search. generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - + def run(self, project, name, prop_set, sources): + assert isinstance(project, targets.ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) if not name: return None @@ -499,12 +510,12 @@ class SearchedLibGenerator (generators.Generator): # If name is empty, it means we're called not from top-level. # In this case, we just fail immediately, because SearchedLibGenerator # cannot be used to produce intermediate targets. - + properties = prop_set.raw () shared = 'shared' in properties a = virtual_target.NullAction (project.manager(), prop_set) - + real_name = feature.get_values ('', properties) if real_name: real_name = real_name[0] @@ -515,7 +526,7 @@ class SearchedLibGenerator (generators.Generator): t = SearchedLibTarget(real_name, project, shared, search, a) # We return sources for a simple reason. 
If there's - # lib png : z : png ; + # lib png : z : png ; # the 'z' target should be returned, so that apps linking to # 'png' will link to 'z', too. return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources) @@ -525,9 +536,14 @@ generators.register (SearchedLibGenerator ()) class PrebuiltLibGenerator(generators.Generator): def __init__(self, id, composing, source_types, target_types_and_names, requirements): - generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) + generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) def run(self, project, name, properties, sources): + assert isinstance(project, targets.ProjectTarget) + assert isinstance(name, basestring) + assert isinstance(properties, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + f = properties.get("file") return f + sources @@ -542,10 +558,11 @@ class CompileAction (virtual_target.Action): virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set) def adjust_properties (self, prop_set): - """ For all virtual targets for the same dependency graph as self, + """ For all virtual targets for the same dependency graph as self, i.e. which belong to the same main target, add their directories to include path. """ + assert isinstance(prop_set, property_set.PropertySet) s = self.targets () [0].creating_subvariant () return prop_set.add_raw (s.implicit_includes ('include', 'H')) @@ -560,7 +577,7 @@ class CCompilingGenerator (generators.Generator): def __init__ (self, id, composing, source_types, target_types_and_names, requirements): # TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong. generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - + def action_class (self): return CompileAction @@ -574,11 +591,15 @@ class LinkingGenerator (generators.Generator): """ def __init__ (self, id, composing, source_types, target_types_and_names, requirements): generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - + def run (self, project, name, prop_set, sources): + assert isinstance(project, targets.ProjectTarget) + assert isinstance(name, basestring) or name is None + assert isinstance(prop_set, property_set.PropertySet) + assert is_iterable_typed(sources, virtual_target.VirtualTarget) sources.extend(prop_set.get('')) - + # Add properties for all searched libraries extra = [] for s in sources: @@ -589,8 +610,8 @@ class LinkingGenerator (generators.Generator): # It's possible that we have libraries in sources which did not came # from 'lib' target. For example, libraries which are specified # just as filenames as sources. We don't have xdll-path properties - # for such target, but still need to add proper dll-path properties. - extra_xdll_path = [] + # for such target, but still need to add proper dll-path properties. + extra_xdll_path = [] for s in sources: if type.is_derived (s.type (), 'SHARED_LIB') and not s.action (): # Unfortunately, we don't have a good way to find the path @@ -602,7 +623,7 @@ class LinkingGenerator (generators.Generator): # Hardcode DLL paths only when linking executables. # Pros: do not need to relink libraries when installing. # Cons: "standalone" libraries (plugins, python extensions) can not - # hardcode paths to dependent libraries. 
+ # hardcode paths to dependent libraries. if prop_set.get('') == ['true'] \ and type.is_derived(self.target_types_ [0], 'EXE'): xdll_path = prop_set.get('') @@ -610,64 +631,69 @@ class LinkingGenerator (generators.Generator): for sp in extra_xdll_path) extra.extend(property.Property('', sp) \ for sp in xdll_path) - + if extra: - prop_set = prop_set.add_raw (extra) + prop_set = prop_set.add_raw (extra) result = generators.Generator.run(self, project, name, prop_set, sources) - + if result: ur = self.extra_usage_requirements(result, prop_set) ur = ur.add(property_set.create(['' + p for p in extra_xdll_path])) else: return None return (ur, result) - + def extra_usage_requirements (self, created_targets, prop_set): - + assert is_iterable_typed(created_targets, virtual_target.VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) + result = property_set.empty () extra = [] - + # Add appropriate usage requirements. raw = prop_set.raw () if 'shared' in raw: paths = [] - - # TODO: is it safe to use the current directory? I think we should use + + # TODO: is it safe to use the current directory? I think we should use # another mechanism to allow this to be run from anywhere. pwd = os.getcwd() - + for t in created_targets: if type.is_derived(t.type(), 'SHARED_LIB'): paths.append(path.root(path.make(t.path()), pwd)) extra += replace_grist(paths, '') - + # We need to pass features that we've got from sources, # because if shared library is built, exe which uses it must know paths # to other shared libraries this one depends on, to be able to find them # all at runtime. - + # Just pass all features in property_set, it's theorically possible # that we'll propagate features explicitly specified by - # the user, but then the user's to blaim for using internal feature. + # the user, but then the user's to blaim for using internal feature. values = prop_set.get('') extra += replace_grist(values, '') - + if extra: result = property_set.create(extra) return result def generated_targets (self, sources, prop_set, project, name): - + assert is_iterable_typed(sources, virtual_target.VirtualTarget) + assert isinstance(prop_set, property_set.PropertySet) + assert isinstance(project, targets.ProjectTarget) + assert isinstance(name, basestring) # sources to pass to inherited rule sources2 = [] # sources which are libraries libraries = [] - + # Searched libraries are not passed as argument to linker # but via some option. So, we pass them to the action - # via property. + # via property. fsa = [] fst = [] for s in sources: @@ -688,7 +714,7 @@ class LinkingGenerator (generators.Generator): if fst: add.append("" + '&&'.join(fst)) - spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name) + spawn = generators.Generator.generated_targets(self, sources2, prop_set.add_raw(add), project, name) return spawn @@ -701,12 +727,12 @@ class ArchiveGenerator (generators.Generator): """ def __init__ (self, id, composing, source_types, target_types_and_names, requirements): generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - + def run (self, project, name, prop_set, sources): sources += prop_set.get ('') - + result = generators.Generator.run (self, project, name, prop_set, sources) - + return result diff --git a/src/tools/bzip2.jam b/src/tools/bzip2.jam new file mode 100644 index 000000000..a994a70ef --- /dev/null +++ b/src/tools/bzip2.jam @@ -0,0 +1,226 @@ +# Copyright (c) 2010 Vladimir Prus. 
+# Copyright (c) 2013 Steven Watanabe +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE_1_0.txt or +# http://www.boost.org/LICENSE_1_0.txt) + +# Supports the bzip library +# +# After 'using bzip2', the following targets are available: +# +# /bzip2//bzip2 -- The bzip library + +import project ; +import ac ; +import errors ; +import "class" : new ; +import targets ; +import path ; +import modules ; +import errors ; +import indirect ; +import property ; +import property-set ; + +header = bzlib.h ; +names = bz2 ; + +sources = blocksort.c bzlib.c compress.c crctable.c + decompress.c huffman.c randtable.c ; + +library-id = 0 ; + +if --debug-configuration in [ modules.peek : ARGV ] +{ + .debug = true ; +} + +# Initializes the bzip library. +# +# bzip can be configured either to use pre-existing binaries +# or to build the library from source. +# +# Options for configuring a prebuilt bzip:: +# +# +# The directory containing the bzip binaries. +# +# Overrides the default library name. +# +# The directory containing the bzip headers. +# +# If none of these options is specified, then the environmental +# variables BZIP2_LIBRARY_PATH, BZIP2_NAME, and BZIP2_INCLUDE will +# be used instead. +# +# Options for building bzip from source:: +# +# +# The bzip source directory. Defaults to the environmental variable +# BZIP2_SOURCE. +# +# A rule which computes the actual name of the compiled +# libraries based on the build properties. Ignored +# when using precompiled binaries. +# +# The base name to use for the compiled library. Ignored +# when using precompiled binaries. +# +# Examples:: +# +# # Find bzip in the default system location +# using bzip2 ; +# # Build bzip from source +# using bzip2 : 1.0.6 : /home/sergey/src/bzip2-1.0.6 ; +# # Find bzip in /usr/local +# using bzip2 : 1.0.6 +# : /usr/local/include /usr/local/lib ; +# # Build bzip from source for msvc and find +# # prebuilt binaries for gcc. +# using bzip2 : 1.0.6 : C:/Devel/src/bzip2-1.0.6 : msvc ; +# using bzip2 : 1.0.6 : : gcc ; +# +rule init ( + version ? + # The bzip version (currently ignored) + + : options * + # A list of the options to use + + : requirements * + # The requirements for the bzip target + + : is-default ? + # Default configurations are only used when bzip + # has not yet been configured. + ) +{ + local caller = [ project.current ] ; + + if ! $(.initialized) + { + .initialized = true ; + + project.initialize $(__name__) ; + .project = [ project.current ] ; + project bzip2 ; + } + + local library-path = [ property.select : $(options) ] ; + library-path = $(library-path:G=) ; + local include-path = [ property.select : $(options) ] ; + include-path = $(include-path:G=) ; + local source-path = [ property.select : $(options) ] ; + source-path = $(source-path:G=) ; + local library-name = [ property.select : $(options) ] ; + library-name = $(library-name:G=) ; + local tag = [ property.select : $(options) ] ; + tag = $(tag:G=) ; + local build-name = [ property.select : $(options) ] ; + build-name = $(build-name:G=) ; + + condition = [ property-set.create $(requirements) ] ; + condition = [ property-set.create [ $(condition).base ] ] ; + + local no-build-from-source ; + # Ignore environmental BZIP2_SOURCE if this initialization + # requested to search for a specific pre-built library. 
+ if $(library-path) || $(include-path) || $(library-name) + { + if $(source-path) || $(tag) || $(build-name) + { + errors.user-error "incompatible options for bzip2:" + [ property.select : $(options) ] "and" + [ property.select : $(options) ] ; + } + else + { + no-build-from-source = true ; + } + } + + source-path ?= [ modules.peek : BZIP2_SOURCE ] ; + + if $(.configured.$(condition)) + { + if $(is-default) + { + if $(.debug) + { + ECHO "notice: [bzip2] bzip is already configured" ; + } + } + else + { + errors.user-error "bzip is already configured" ; + } + return ; + } + else if $(source-path) && ! $(no-build-from-source) + { + build-name ?= bz2 ; + library-id = [ CALC $(library-id) + 1 ] ; + tag = [ MATCH ^@?(.*)$ : $(tag) ] ; + if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ] + { + tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ; + } + sources = [ path.glob $(source-path) : $(sources) ] ; + if $(.debug) + { + ECHO "notice: [bzip2] Building bzip from source as $(build-name)" ; + if $(condition) + { + ECHO "notice: [bzip2] Condition" [ $(condition).raw ] ; + } + if $(sources) + { + ECHO "notice: [bzip2] found bzip source in $(source-path)" ; + } + else + { + ECHO "warning: [bzip2] could not find bzip source in $(source-path)" ; + } + } + local target ; + if $(sources) { + target = [ targets.create-typed-target LIB : $(.project) + : $(build-name).$(library-id) + : $(sources) + : $(requirements) + @$(tag) + $(source-path) + msvc:_CRT_SECURE_NO_DEPRECATE + msvc:_SCL_SECURE_NO_DEPRECATE + shared:BZIP2_DLL + : + : $(source-path) ] ; + } + + local mt = [ new ac-library bzip2 : $(.project) : $(condition) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + if $(target) + { + $(mt).set-target $(target) ; + } + targets.main-target-alternative $(mt) ; + } else { + if $(.debug) + { + ECHO "notice: [bzip2] Using pre-installed library" ; + if $(condition) + { + ECHO "notice: [bzip2] Condition" [ $(condition).raw ] ; + } + } + + local mt = [ new ac-library bzip2 : $(.project) : $(condition) : + $(include-path) : $(library-path) : $(library-name) : $(root) ] ; + $(mt).set-header $(header) ; + $(mt).set-default-names $(names) ; + targets.main-target-alternative $(mt) ; + } + .configured.$(condition) = true ; +} diff --git a/src/tools/cast.py b/src/tools/cast.py index 8f053f110..0d21edb0b 100644 --- a/src/tools/cast.py +++ b/src/tools/cast.py @@ -25,18 +25,22 @@ # > cast, as defining a new target type + generator for that type is somewhat # > simpler than defining a main target rule. 
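[Editor's note: the cast.py hunk that follows, like most Python hunks in this patch, adds asserts built on b2.util.is_iterable_typed. Below is a rough, self-contained sketch of what such a check amounts to; the real helper may differ in detail (Python 2 style, matching the surrounding code).]

    def is_iterable_typed(values, base_type):
        # True if 'values' is a non-string iterable whose items are all base_type.
        if isinstance(values, basestring):
            return False
        try:
            items = iter(values)
        except TypeError:
            return False
        return all(isinstance(v, base_type) for v in items)

    assert is_iterable_typed(['a.cpp', 'b.cpp'], str)
    assert not is_iterable_typed('a.cpp', str)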
-import b2.build.targets as targets -import b2.build.virtual_target as virtual_target +from b2.build import targets, virtual_target, property_set from b2.manager import get_manager -from b2.util import bjam_signature +from b2.util import bjam_signature, is_iterable_typed + class CastTargetClass(targets.TypedTarget): - def construct(name, source_targets, ps): + def construct(self, name, source_targets, ps): + assert isinstance(name, basestring) + assert is_iterable_typed(source_targets, virtual_target.VirtualTarget) + assert isinstance(ps, property_set.PropertySet) + result = [] for s in source_targets: - if not isinstance(s, virtual_targets.FileTarget): + if not isinstance(s, virtual_target.FileTarget): get_manager().errors()("Source to the 'cast' metatager is not a file") if s.action(): @@ -46,18 +50,17 @@ class CastTargetClass(targets.TypedTarget): r = s.clone_with_different_type(self.type()) result.append(get_manager().virtual_targets().register(r)) - return result - + return property_set.empty(), result @bjam_signature((["name", "type"], ["sources", "*"], ["requirements", "*"], ["default_build", "*"], ["usage_requirements", "*"])) def cast(name, type, sources, requirements, default_build, usage_requirements): - + from b2.manager import get_manager t = get_manager().targets() - + project = get_manager().projects().current() - + return t.main_target_alternative( CastTargetClass(name, project, type, t.main_target_sources(sources, name), diff --git a/src/tools/clang-darwin.jam b/src/tools/clang-darwin.jam index 51e5fad75..6b6e19857 100644 --- a/src/tools/clang-darwin.jam +++ b/src/tools/clang-darwin.jam @@ -28,6 +28,9 @@ generators.override clang-darwin.prebuilt : builtin.lib-generator ; generators.override clang-darwin.prebuilt : builtin.prebuilt ; generators.override clang-darwin.searched-lib-generator : searched-lib-generator ; +generators.register-c-compiler clang-darwin.compile.m : OBJECTIVE_C : OBJ : clang darwin ; +generators.register-c-compiler clang-darwin.compile.mm : OBJECTIVE_CPP : OBJ : clang darwin ; + toolset.inherit-rules clang-darwin : gcc ; toolset.inherit-flags clang-darwin : gcc : off on full space @@ -72,18 +75,22 @@ rule init ( version ? : command * : options * ) SPACE = " " ; -flags clang-darwin.compile OPTIONS ; -flags clang-darwin.compile.c++ OPTIONS ; -# flags clang-darwin.compile INCLUDES ; +toolset.flags clang-darwin.compile OPTIONS ; +toolset.flags clang-darwin.compile.c++ OPTIONS ; +toolset.flags clang-darwin.compile.m OPTIONS ; +toolset.flags clang-darwin.compile.mm OPTIONS ; +toolset.flags clang-darwin.compile.mm OPTIONS ; +# toolset.flags clang-darwin.compile INCLUDES ; # Declare flags and action for compilation. 
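[Editor's note: the compile.m / compile.mm generators registered above (and the actions spelled out further below) drive clang with an explicit language selection. A hedged Python illustration of that command shape follows; every name in it is a stand-in, not b2 API.]

    import subprocess

    def compile_objc(config_command, source, obj,
                     options=(), defines=(), includes=(), objcpp=False):
        # '-x objective-c' / '-x objective-c++' select the language explicitly,
        # mirroring the new actions in clang-darwin.jam.
        cmd = [config_command, '-x', 'objective-c++' if objcpp else 'objective-c']
        cmd += list(options)
        cmd += ['-D' + d for d in defines]
        cmd += ['-I' + i for i in includes]
        cmd += ['-c', '-o', obj, source]
        return subprocess.call(cmd)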
toolset.flags clang-darwin.compile OPTIONS off : -O0 ; toolset.flags clang-darwin.compile OPTIONS speed : -O3 ; toolset.flags clang-darwin.compile OPTIONS space : -Os ; +# For clang, 'on' and 'full' are identical toolset.flags clang-darwin.compile OPTIONS off : -fno-inline ; toolset.flags clang-darwin.compile OPTIONS on : -Wno-inline ; -toolset.flags clang-darwin.compile OPTIONS full : -finline-functions -Wno-inline ; +toolset.flags clang-darwin.compile OPTIONS full : -Wno-inline ; toolset.flags clang-darwin.compile OPTIONS off : -w ; toolset.flags clang-darwin.compile OPTIONS on : -Wall ; @@ -94,6 +101,8 @@ toolset.flags clang-darwin.compile OPTIONS on : -g ; toolset.flags clang-darwin.compile OPTIONS on : -pg ; toolset.flags clang-darwin.compile OPTIONS off : -fno-rtti ; +toolset.flags clang-darwin.compile OPTIONS ; + actions compile.c { "$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" @@ -104,6 +113,16 @@ actions compile.c++ "$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" } +actions compile.m +{ + "$(CONFIG_COMMAND)" -x objective-c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + +actions compile.mm +{ + "$(CONFIG_COMMAND)" -x objective-c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)" +} + flags clang-darwin ARFLAGS ; # Default value. Mostly for the sake of clang-linux @@ -166,5 +185,5 @@ actions link bind LIBRARIES actions link.dll bind LIBRARIES { - "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "$(<[1]:D=)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) + "$(CONFIG_COMMAND)" $(USER_OPTIONS) -L"$(LINKPATH)" -o "$(<)" -single_module -dynamiclib -install_name "@rpath/$(<[1]:D=)" "$(>)" "$(LIBRARIES)" $(START-GROUP) $(FINDLIBS-ST-PFX) -l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) $(OPTIONS) } diff --git a/src/tools/common.jam b/src/tools/common.jam index 8404febf9..ca9480be9 100644 --- a/src/tools/common.jam +++ b/src/tools/common.jam @@ -469,9 +469,10 @@ rule get-program-files-dir ( ) if [ os.name ] = NT { + NULL_DEVICE = "NUL" ; + IGNORE = "2>$(NULL_DEVICE) >$(NULL_DEVICE) & setlocal" ; RM = del /f /q ; CP = copy /b ; - IGNORE = "2>nul >nul & setlocal" ; LN ?= $(CP) ; # Ugly hack to convince copy to set the timestamp of the destination to the # current time by concatenating the source with a nonexistent file. Note @@ -479,13 +480,31 @@ if [ os.name ] = NT # /a (ascii). WINDOWS-CP-HACK = "+ this-file-does-not-exist-A698EE7806899E69" ; } +else if [ os.name ] = VMS +{ + NULL_DEVICE = "NL:" ; + PIPE = PIPE ; + IGNORE = "2>$(NULL_DEVICE) >$(NULL_DEVICE)" ; + RM = DELETE /NOCONF ; + CP = COPY /OVERWRITE ; + LN = $(CP) ; +} else { + NULL_DEVICE = "/dev/null" ; + IGNORE = "2>$(NULL_DEVICE) >$(NULL_DEVICE)" ; RM = rm -f ; CP = cp ; LN = ln ; } +NULL_OUT = ">$(NULL_DEVICE)" ; + +rule null-device ( ) +{ + return $(NULL_DEVICE) ; +} + rule rm-command ( ) { @@ -513,6 +532,13 @@ else q = "\"" ; } + +rule newline-char ( ) +{ + return $(nl) ; +} + + # Returns the command needed to set an environment variable on the current # platform. The variable setting persists through all following commands and is # visible in the environment seen by subsequently executed commands. 
In other @@ -525,6 +551,10 @@ rule variable-setting-command ( variable : value ) { return "set $(variable)=$(value)$(nl)" ; } + else if [ os.name ] = VMS + { + return "$(variable) == $(q)$(value)$(q)$(nl)" ; + } else { # If we do not have escape character support in bjam, the cod below @@ -584,6 +614,10 @@ rule file-creation-command ( ) # (22.02.2009.) (Jurko) return "echo. > " ; } + else if [ os.name ] = VMS + { + return "APPEND /NEW NL: " ; + } else { return "touch " ; @@ -601,6 +635,10 @@ rule file-touch-command ( ) { return "echo. >> " ; } + else if [ os.name ] = VMS + { + return "APPEND /NEW NL: " ; + } else { return "touch " ; @@ -679,6 +717,7 @@ else } } + actions piecemeal together existing Clean { $(RM) "$(>)" @@ -714,6 +753,35 @@ actions hard-link } +if [ os.name ] = VMS +{ + actions mkdir + { + IF F$PARSE("$(<:W)") .EQS. "" THEN CREATE /DIR $(<:W) + } + + actions piecemeal together existing Clean + { + $(RM) $(>:WJ=;*,);* + } + + actions copy + { + $(CP) $(>:WJ=,) $(<:W) + } + + actions quietly updated piecemeal together RmTemps + { + $(PIPE) $(RM) $(>:WJ=;*,);* $(IGNORE) + } + + actions hard-link + { + $(PIPE) $(RM) $(>[1]:W);* $(IGNORE) + $(PIPE) $(LN) $(>[1]:W) $(<:W) $(NULL_OUT) + } +} + # Given a target, as given to a custom tag rule, returns a string formatted # according to the passed format. Format is a list of properties that is # represented in the result. For each element of format the corresponding target diff --git a/src/tools/common.py b/src/tools/common.py index 443b3e92d..3f30baa56 100644 --- a/src/tools/common.py +++ b/src/tools/common.py @@ -17,31 +17,31 @@ import os.path import sys # for some reason this fails on Python 2.7(r27:82525) -# from b2.build import virtual_target +# from b2.build import virtual_target import b2.build.virtual_target from b2.build import feature, type from b2.util.utility import * -from b2.util import path +from b2.util import path, is_iterable_typed __re__before_first_dash = re.compile ('([^-]*)-') def reset (): """ Clear the module state. This is mainly for testing purposes. Note that this must be called _after_ resetting the module 'feature'. - """ + """ global __had_unspecified_value, __had_value, __declared_subfeature global __init_loc global __all_signatures, __debug_configuration, __show_configuration - + # Stores toolsets without specified initialization values. __had_unspecified_value = {} # Stores toolsets with specified initialization values. __had_value = {} - + # Stores toolsets with declared subfeatures. __declared_subfeature = {} - + # Stores all signatures of the toolsets. __all_signatures = {} @@ -70,7 +70,7 @@ def reset (): "HAIKU": "LIBRARY_PATH"} global __shared_library_path_variable __shared_library_path_variable = m.get(OS, "LD_LIBRARY_PATH") - + reset() def shared_library_path_variable(): @@ -112,6 +112,7 @@ class Configurations(object): Returns True if the configuration has been added and False if it already exists. Reports an error if the configuration is 'used'. """ + assert isinstance(id, basestring) if id in self.used_: #FIXME errors.error("common: the configuration '$(id)' is in use") @@ -132,6 +133,7 @@ class Configurations(object): 'used' and False if it the state wasn't changed. Reports an error if the configuration isn't known. """ + assert isinstance(id, basestring) if id not in self.all_: #FIXME: errors.error("common: the configuration '$(id)' is not known") @@ -154,10 +156,15 @@ class Configurations(object): def get(self, id, param): """ Returns the value of a configuration parameter. 
""" + assert isinstance(id, basestring) + assert isinstance(param, basestring) return self.params_.get(param, {}).get(id) def set (self, id, param, value): """ Sets the value of a configuration parameter. """ + assert isinstance(id, basestring) + assert isinstance(param, basestring) + assert is_iterable_typed(value, basestring) self.params_.setdefault(param, {})[id] = value # Ported from trunk@47174 @@ -174,14 +181,11 @@ def check_init_parameters(toolset, requirement, *args): The return value from this rule is a condition to be used for flags settings. """ + assert isinstance(toolset, basestring) + assert is_iterable_typed(requirement, basestring) from b2.build import toolset as b2_toolset if requirement is None: requirement = [] - # The type checking here is my best guess about - # what the types should be. - assert(isinstance(toolset, str)) - # iterable and not a string, allows for future support of sets - assert(not isinstance(requirement, basestring) and hasattr(requirement, '__contains__')) sig = toolset condition = replace_grist(toolset, '') subcondition = [] @@ -193,7 +197,7 @@ def check_init_parameters(toolset, requirement, *args): value = arg[1] assert(isinstance(name, str)) assert(isinstance(value, str) or value is None) - + str_toolset_name = str((toolset, name)) # FIXME: is this the correct translation? @@ -235,7 +239,7 @@ def check_init_parameters(toolset, requirement, *args): __had_unspecified_value[str_toolset_name] = True if value == None: value = '' - + sig = sig + value + '-' # if a requirement is specified, the signature should be unique @@ -245,12 +249,12 @@ def check_init_parameters(toolset, requirement, *args): if __all_signatures.has_key(sig): message = "duplicate initialization of '%s' with the following parameters: " % toolset - + for arg in args: name = arg[0] value = arg[1] if value == None: value = '' - + message += "'%s' = '%s'\n" % (name, value) raise BaseException(message) @@ -285,38 +289,35 @@ def get_invocation_command_nodefault( 'user-provided-command' is not given, tries to find binary named 'tool' in PATH and in the passed 'additional-path'. Otherwise, verifies that the first element of 'user-provided-command' is an existing program. - + This rule returns the command to be used when invoking the tool. If we can't find the tool, a warning is issued. If 'path-last' is specified, PATH is checked after 'additional-paths' when searching for 'tool'. 
""" - assert(isinstance(toolset, str)) - assert(isinstance(tool, str)) - assert(isinstance(user_provided_command, list)) - if additional_paths is not None: - assert(isinstance(additional_paths, list)) - assert(all([isinstance(path, str) for path in additional_paths])) - assert(all(isinstance(path, str) for path in additional_paths)) - assert(isinstance(path_last, bool)) - + assert isinstance(toolset, basestring) + assert isinstance(tool, basestring) + assert is_iterable_typed(user_provided_command, basestring) + assert is_iterable_typed(additional_paths, basestring) or additional_paths is None + assert isinstance(path_last, (int, bool)) + if not user_provided_command: - command = find_tool(tool, additional_paths, path_last) + command = find_tool(tool, additional_paths, path_last) if not command and __debug_configuration: print "warning: toolset", toolset, "initialization: can't find tool, tool" #FIXME #print "warning: initialized from" [ errors.nearest-user-location ] ; else: command = check_tool(user_provided_command) - assert(isinstance(command, list)) - command=' '.join(command) if not command and __debug_configuration: print "warning: toolset", toolset, "initialization:" print "warning: can't find user-provided command", user_provided_command #FIXME #ECHO "warning: initialized from" [ errors.nearest-user-location ] + command = [] + command = ' '.join(command) assert(isinstance(command, str)) - + return command # ported from trunk@47174 @@ -325,14 +326,11 @@ def get_invocation_command(toolset, tool, user_provided_command = [], """ Same as get_invocation_command_nodefault, except that if no tool is found, returns either the user-provided-command, if present, or the 'tool' parameter. """ - - assert(isinstance(toolset, str)) - assert(isinstance(tool, str)) - assert(isinstance(user_provided_command, list)) - if additional_paths is not None: - assert(isinstance(additional_paths, list)) - assert(all([isinstance(path, str) for path in additional_paths])) - assert(isinstance(path_last, bool)) + assert isinstance(toolset, basestring) + assert isinstance(tool, basestring) + assert is_iterable_typed(user_provided_command, basestring) + assert is_iterable_typed(additional_paths, basestring) or additional_paths is None + assert isinstance(path_last, (int, bool)) result = get_invocation_command_nodefault(toolset, tool, user_provided_command, @@ -346,7 +344,7 @@ def get_invocation_command(toolset, tool, user_provided_command = [], result = tool assert(isinstance(result, str)) - + return result # ported from trunk@47281 @@ -356,6 +354,7 @@ def get_absolute_tool_path(command): return the absolute path to the command. This works even if commnad has not path element and is present in PATH. """ + assert isinstance(command, basestring) if os.path.dirname(command): return os.path.dirname(command) else: @@ -376,9 +375,9 @@ def find_tool(name, additional_paths = [], path_last = False): Otherwise, returns the empty string. If 'path_last' is specified, path is checked after 'additional_paths'. """ - assert(isinstance(name, str)) - assert(isinstance(additional_paths, list)) - assert(isinstance(path_last, bool)) + assert isinstance(name, basestring) + assert is_iterable_typed(additional_paths, basestring) + assert isinstance(path_last, (int, bool)) programs = path.programs_path() match = path.glob(programs, [name, name + '.exe']) @@ -407,7 +406,7 @@ def check_tool_aux(command): """ Checks if 'command' can be found either in path or is a full name to an existing file. 
""" - assert(isinstance(command, str)) + assert isinstance(command, basestring) dirname = os.path.dirname(command) if dirname: if os.path.exists(command): @@ -425,13 +424,12 @@ def check_tool_aux(command): # ported from trunk@47281 def check_tool(command): - """ Checks that a tool can be invoked by 'command'. + """ Checks that a tool can be invoked by 'command'. If command is not an absolute path, checks if it can be found in 'path'. If comand is absolute path, check that it exists. Returns 'command' if ok and empty string otherwise. """ - assert(isinstance(command, list)) - assert(all(isinstance(c, str) for c in command)) + assert is_iterable_typed(command, basestring) #FIXME: why do we check the first and last elements???? if check_tool_aux(command[0]) or check_tool_aux(command[-1]): return command @@ -449,11 +447,10 @@ def handle_options(tool, condition, command, options): """ from b2.build import toolset - assert(isinstance(tool, str)) - assert(isinstance(condition, list)) - assert(isinstance(command, str)) - assert(isinstance(options, list)) - assert(command) + assert isinstance(tool, basestring) + assert is_iterable_typed(condition, basestring) + assert command and isinstance(command, basestring) + assert is_iterable_typed(options, basestring) toolset.flags(tool, 'CONFIG_COMMAND', condition, [command]) toolset.flags(tool + '.compile', 'OPTIONS', condition, feature.get_values('', options)) toolset.flags(tool + '.compile.c', 'OPTIONS', condition, feature.get_values('', options)) @@ -490,8 +487,8 @@ def variable_setting_command(variable, value): words, on Unix systems, the variable is exported, which is consistent with the only possible behavior on Windows systems. """ - assert(isinstance(variable, str)) - assert(isinstance(value, str)) + assert isinstance(variable, basestring) + assert isinstance(value, basestring) if os_name() == 'NT': return "set " + variable + "=" + value + os.linesep @@ -533,8 +530,8 @@ def path_variable_setting_command(variable, paths): Returns a command to sets a named shell path variable to the given NATIVE paths on the current platform. """ - assert(isinstance(variable, str)) - assert(isinstance(paths, list)) + assert isinstance(variable, basestring) + assert is_iterable_typed(paths, basestring) sep = os.path.pathsep return variable_setting_command(variable, sep.join(paths)) @@ -542,7 +539,10 @@ def prepend_path_variable_command(variable, paths): """ Returns a command that prepends the given paths to the named path variable on the current platform. - """ + """ + assert isinstance(variable, basestring) + assert is_iterable_typed(paths, basestring) + return path_variable_setting_command(variable, paths + os.environ.get(variable, "").split(os.pathsep)) @@ -562,6 +562,7 @@ __mkdir_set = set() __re_windows_drive = re.compile(r'^.*:\$') def mkdir(engine, target): + assert isinstance(target, basestring) # If dir exists, do not update it. Do this even for $(DOT). bjam.call('NOUPDATE', target) @@ -590,7 +591,7 @@ def mkdir(engine, target): if os_name() == 'NT': if(__re_windows_drive.match(s)): s = '' - + if s: if s != target: engine.add_dependency(target, s) @@ -642,9 +643,12 @@ def format_name(format, name, target_type, prop_set): The returned name also has the target type specific prefix and suffix which puts it in a ready form to use as the value from a custom tag rule. 
""" - assert(isinstance(format, list)) - assert(isinstance(name, str)) - assert(isinstance(target_type, str) or not type) + if __debug__: + from ..build.property_set import PropertySet + assert is_iterable_typed(format, basestring) + assert isinstance(name, basestring) + assert isinstance(target_type, basestring) + assert isinstance(prop_set, PropertySet) # assert(isinstance(prop_set, property_set.PropertySet)) if type.is_derived(target_type, 'LIB'): result = "" ; @@ -653,7 +657,7 @@ def format_name(format, name, target_type, prop_set): if grist == '': result += os.path.basename(name) elif grist == '': - result += join_tag(get_value(f), + result += join_tag(get_value(f), toolset_tag(name, target_type, prop_set)) elif grist == '': result += join_tag(get_value(f), @@ -690,6 +694,8 @@ def format_name(format, name, target_type, prop_set): return result def join_tag(joiner, tag): + assert isinstance(joiner, basestring) + assert isinstance(tag, basestring) if tag: if not joiner: joiner = '-' return joiner + tag @@ -698,6 +704,11 @@ def join_tag(joiner, tag): __re_toolset_version = re.compile(r"(\d+)[.](\d*)") def toolset_tag(name, target_type, prop_set): + if __debug__: + from ..build.property_set import PropertySet + assert isinstance(name, basestring) + assert isinstance(target_type, basestring) + assert isinstance(prop_set, PropertySet) tag = '' properties = prop_set.raw() @@ -708,7 +719,7 @@ def toolset_tag(name, target_type, prop_set): elif tools.startswith('como'): tag += 'como' elif tools.startswith('cw'): tag += 'cw' elif tools.startswith('darwin'): tag += 'xgcc' - elif tools.startswith('edg'): tag += edg + elif tools.startswith('edg'): tag += 'edg' elif tools.startswith('gcc'): flavor = prop_set.get('') ''.find @@ -764,6 +775,11 @@ def toolset_tag(name, target_type, prop_set): def threading_tag(name, target_type, prop_set): + if __debug__: + from ..build.property_set import PropertySet + assert isinstance(name, basestring) + assert isinstance(target_type, basestring) + assert isinstance(prop_set, PropertySet) tag = '' properties = prop_set.raw() if 'multi' in properties: tag = 'mt' @@ -772,6 +788,11 @@ def threading_tag(name, target_type, prop_set): def runtime_tag(name, target_type, prop_set ): + if __debug__: + from ..build.property_set import PropertySet + assert isinstance(name, basestring) + assert isinstance(target_type, basestring) + assert isinstance(prop_set, PropertySet) tag = '' properties = prop_set.raw() @@ -847,13 +868,13 @@ def init(manager): __CP = 'cp' __IGNORE = '' __LN = 'ln' - + engine.register_action("common.Clean", __RM + ' "$(>)"', flags=['piecemeal', 'together', 'existing']) engine.register_action("common.copy", __CP + ' "$(>)" "$(<)"') engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE, flags=['quietly', 'updated', 'piecemeal', 'together']) - engine.register_action("common.hard-link", + engine.register_action("common.hard-link", __RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep + __LN + ' "$(>)" "$(<)" $(NULL_OUT)') diff --git a/src/tools/darwin.py b/src/tools/darwin.py index c29196060..f03d63f35 100644 --- a/src/tools/darwin.py +++ b/src/tools/darwin.py @@ -21,11 +21,11 @@ def init (version = None, command = None, options = None): options = to_seq (options) condition = common.check_init_parameters ('darwin', None, ('version', version)) - + command = common.get_invocation_command ('darwin', 'g++', command) common.handle_options ('darwin', condition, command, options) - + gcc.init_link_flags ('darwin', 'darwin', condition) # Darwin has a 
different shared library suffix diff --git a/src/tools/docutils.jam b/src/tools/docutils.jam index 02b2794b2..d5c99b184 100644 --- a/src/tools/docutils.jam +++ b/src/tools/docutils.jam @@ -51,13 +51,20 @@ rule init ( docutils-dir ? : tools-dir ? ) if ! $(.initialized) { .initialized = true ; - .docutils-dir = $(docutils-dir) ; - .tools-dir = $(tools-dir:R="") ; - - .setup = [ - common.prepend-path-variable-command PYTHONPATH - : $(.docutils-dir) $(.docutils-dir)/extras ] ; - RST2XXX = [ common.find-tool rst2html ] ; + if $(docutils-dir) + { + .docutils-dir = $(docutils-dir) ; + .tools-dir = $(tools-dir:R="") ; + + .setup = [ + common.prepend-path-variable-command PYTHONPATH + : $(.docutils-dir) $(.docutils-dir)/extras ] ; + RST2XXX = [ common.find-tool rst2html ] ; + } + else + { + RST2XXX_PY = [ common.find-tool rst2html.py ] ; + } } } @@ -66,7 +73,7 @@ rule html ( target : source : properties * ) if ! [ on $(target) return $(RST2XXX) ] { local python-cmd = [ property.select : $(properties) ] ; - if ! $(.tools-dir) { + if ! $(.tools-dir) && ! $(RST2XXX_PY) { errors.user-error "The docutils module is used, but not configured. " : "" @@ -79,7 +86,26 @@ rule html ( target : source : properties * ) : "On Windows, you can install from http://docutils.sourceforge.net/." ; } - RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ; + + if $(RST2XXX_PY) + { + if $(RST2XXX_PY:D) + { + # If we have a path to the rst2html.py script, we need to use + # the python interpreter to load it up. + RST2XXX on $(target) = $(python-cmd:G=:E="python") $(RST2XXX_PY) ; + } + else + { + # Otherwise, bare rst2html.py, we can just exec that directly. + # This work for both Nix, and the standard Windows Python installs. + RST2XXX on $(target) = $(RST2XXX_PY) ; + } + } + else + { + RST2XXX on $(target) = $(python-cmd:G=:E="python") $(.tools-dir)/rst2html.py ; + } } } diff --git a/src/tools/doxproc.py b/src/tools/doxproc.py index 4cbd5edd2..c41d7fde1 100644 --- a/src/tools/doxproc.py +++ b/src/tools/doxproc.py @@ -17,7 +17,7 @@ import glob import re import xml.dom.minidom - + def usage(): print ''' Usage: @@ -49,11 +49,11 @@ def get_args( argv = sys.argv[1:] ): } ( option_pairs, other ) = getopt.getopt( argv, '', spec ) map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs ) - + if options.has_key( '--help' ): usage() sys.exit(1) - + return { 'xmldir' : options['--xmldir'], 'output' : options['--output'], @@ -69,7 +69,7 @@ def if_attribute(node, attribute, true_value, false_value=None): return false_value class Doxygen2BoostBook: - + def __init__( self, **kwargs ): ## self.args = kwargs @@ -113,17 +113,17 @@ class Doxygen2BoostBook: self.idmap = {} #~ Marks generation, to prevent redoing it. self.generated = False - + #~ Add an Doxygen generated XML document to the content we are translating. def addDox( self, document ): self._translateNode(document.documentElement) - + #~ Turns the internal XML tree into an output UTF-8 string. def tostring( self ): self._generate() #~ return self.boostbook.toprettyxml(' ') return self.boostbook.toxml('utf-8') - + #~ Does post-processing on the partial generated content to generate additional info #~ now that we have the complete source documents. 
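[Editor's note: the translator in doxproc.py routes each XML node to a method whose name is derived from the node's tag and context (see _translateNode below). A tiny runnable sketch of that getattr-based dispatch, with made-up names:]

    import xml.dom.minidom

    class MiniTranslator(object):
        # Dispatch a node to _translate_<tag>, if such a handler exists.
        def translate(self, node):
            handler = getattr(self, '_translate_' + node.nodeName, None)
            return handler(node) if handler else None

        def _translate_para(self, node):
            return node.firstChild.data if node.firstChild else ''

    doc = xml.dom.minidom.parseString('<para>hello</para>')
    print(MiniTranslator().translate(doc.documentElement))  # prints: hello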
def _generate( self ): @@ -146,7 +146,7 @@ class Doxygen2BoostBook: del self.idmap[self.symbols[symbol]['id']] container.appendChild(self.symbols[symbol]['dom']) self._rewriteIDs(self.boostbook.documentElement) - + #~ Rewrite the various IDs from Doxygen references to the newly created #~ BoostBook references. def _rewriteIDs( self, node ): @@ -166,7 +166,7 @@ class Doxygen2BoostBook: self._rewriteIDs(node.firstChild) if node.nextSibling: self._rewriteIDs(node.nextSibling) - + def _resolveContainer( self, cpp, root ): container = root for ns in cpp['namespace']: @@ -181,11 +181,11 @@ class Doxygen2BoostBook: break container = node return container - + def _setID( self, id, name ): self.idmap[id] = name.replace('::','.').replace('/','.') #~ print '--| setID:',id,'::',self.idmap[id] - + #~ Translate a given node within a given context. #~ The translation dispatches to a local method of the form #~ "_translate[_context0,...,_contextN]", and the keyword args are @@ -208,7 +208,7 @@ class Doxygen2BoostBook: if hasattr(self,name): return getattr(self,name)(node,**kwargs) return None - + #~ Translates the children of the given parent node, appending the results #~ to the indicated target. For nodes not translated by the translation method #~ it copies the child over and recurses on that child to translate any @@ -225,7 +225,7 @@ class Doxygen2BoostBook: child.data = re.sub(r'\s+',' ',child.data) target.appendChild(child) self._translateChildren(n,target=child) - + #~ Translate the given node as a description, into the description subnode #~ of the target. If no description subnode is present in the target it #~ is created. @@ -235,7 +235,7 @@ class Doxygen2BoostBook: description = target.appendChild(self._createNode(tag)) self._translateChildren(node,target=description) return description - + #~ Top level translation of: ..., #~ translates the children. def _translate_doxygen( self, node ): @@ -246,7 +246,7 @@ class Doxygen2BoostBook: if newNode: result.append(newNode) return result - + #~ Top level translation of: #~ #~ @@ -294,7 +294,7 @@ class Doxygen2BoostBook: self._translate_index_(entries,target=self.section['index']) self._translate_index_(classes,target=self.section['classes']) return None - + #~ Translate a set of index entries in the BoostBook output. The output #~ is grouped into groups of the first letter of the entry names. def _translate_index_(self, entries, target=None, **kwargs ): @@ -314,12 +314,12 @@ class Doxygen2BoostBook: 'link',entries[i]['compoundname'],linkend=entries[i]['id'])) ie.appendChild(self.boostbook.createTextNode(')')) i += 1 - + #~ Translate a ..., #~ by retranslating with the "kind" of compounddef. def _translate_compounddef( self, node, target=None, **kwargs ): return self._translateNode(node,node.getAttribute('kind')) - + #~ Translate a .... For #~ namespaces we just collect the information for later use as there is no #~ currently namespaces are not included in the BoostBook format. In the future @@ -343,12 +343,12 @@ class Doxygen2BoostBook: self.symbols[namespace['name']] = namespace #~ self._setID(namespace['id'],namespace['name']) return None - + #~ Translate a ..., which #~ forwards to the kind=struct as they are the same. def _translate_compounddef_class( self, node, target=None, **kwargs ): return self._translate_compounddef_struct(node,tag='class',target=target,**kwargs) - + #~ Translate a ... into: #~
#~ @@ -381,7 +381,7 @@ class Doxygen2BoostBook: self._translateNode(n,target=struct,scope=compoundname['compoundname']) result = struct return result - + #~ Translate a ..., def _translate_compounddef_includes_( self, node, target=None, **kwargs ): name = node.firstChild.data @@ -395,7 +395,7 @@ class Doxygen2BoostBook: name=name) } return None - + #~ Translate a ... into: #~ #~ ... @@ -405,7 +405,7 @@ class Doxygen2BoostBook: access=ref.getAttribute('prot'))) self._translateChildren(ref,target=inherit) return - + #~ Translate: #~ #~ @@ -448,7 +448,7 @@ class Doxygen2BoostBook: value = self._getData(defval) templateParam.appendChild(self._createText('default',value)) return template - + #~ Translate: #~ ... #~ Into: @@ -456,37 +456,37 @@ class Doxygen2BoostBook: def _translate_briefdescription( self, brief, target=None, **kwargs ): self._translateDescription(brief,target=target,**kwargs) return self._translateDescription(brief,target=target,tag='purpose',**kwargs) - + #~ Translate: #~ ... #~ Into: #~ ... def _translate_detaileddescription( self, detailed, target=None, **kwargs ): return self._translateDescription(detailed,target=target,**kwargs) - + #~ Translate: #~ ... #~ With kind specific translation. def _translate_sectiondef( self, sectiondef, target=None, **kwargs ): self._translateNode(sectiondef,sectiondef.getAttribute('kind'),target=target,**kwargs) - + #~ Translate non-function sections. def _translate_sectiondef_x_( self, sectiondef, target=None, **kwargs ): for n in sectiondef.childNodes: if hasattr(n,'getAttribute'): self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs) return None - + #~ Translate: #~ ... def _translate_sectiondef_public_type( self, sectiondef, target=None, **kwargs ): return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs) - + #~ Translate: #~ ... def _translate_sectiondef_public_attrib( self, sectiondef, target=None, **kwargs): return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs) - + #~ Translate: #~ ... #~ All the various function group translations end up here for which @@ -500,31 +500,31 @@ class Doxygen2BoostBook: if hasattr(n,'getAttribute'): self._translateNode(n,n.getAttribute('kind'),target=members,**kwargs) return members - + #~ Translate: #~ ... def _translate_sectiondef_public_func( self, sectiondef, target=None, **kwargs ): return self._translate_sectiondef_func_(sectiondef, name='public member functions',target=target,**kwargs) - + #~ Translate: #~ ... def _translate_sectiondef_public_static_func( self, sectiondef, target=None, **kwargs): return self._translate_sectiondef_func_(sectiondef, name='public static functions',target=target,**kwargs) - + #~ Translate: #~ ... def _translate_sectiondef_protected_func( self, sectiondef, target=None, **kwargs ): return self._translate_sectiondef_func_(sectiondef, name='protected member functions',target=target,**kwargs) - + #~ Translate: #~ ... def _translate_sectiondef_private_static_func( self, sectiondef, target=None, **kwargs): return self._translate_sectiondef_func_(sectiondef, name='private static functions',target=target,**kwargs) - + #~ Translate: #~ ... def _translate_sectiondef_private_func( self, sectiondef, target=None, **kwargs ): @@ -536,7 +536,7 @@ class Doxygen2BoostBook: def _translate_sectiondef_user_defined( self, sectiondef, target=None, **kwargs ): return self._translate_sectiondef_func_(sectiondef, name=self._getChildData('header', root=sectiondef),target=target,**kwargs) - + #~ Translate: #~ #~ ... 
@@ -554,7 +554,7 @@ class Doxygen2BoostBook: typedef_type = typedef.appendChild(self._createNode('type')) self._translate_type(self._getChild('type',root=memberdef),target=typedef_type) return typedef - + #~ Translate: #~ #~ ... @@ -594,13 +594,13 @@ class Doxygen2BoostBook: for n in memberdef.childNodes: self._translateNode(memberdef,'function',n,target=method) return method - + #~ Translate: #~ ... def _translate_memberdef_function_templateparamlist( self, templateparamlist, target=None, **kwargs ): return self._translate_templateparamlist(templateparamlist,target=target,**kwargs) - + #~ Translate: #~ ... #~ To: @@ -611,7 +611,7 @@ class Doxygen2BoostBook: if methodType.hasChildNodes(): target.appendChild(methodType) return methodType - + #~ Translate: #~ ... def _translate_memberdef_function_briefdescription( self, description, target=None, **kwargs ): @@ -621,22 +621,22 @@ class Doxygen2BoostBook: ## on the previous line, don't bother with the repetition. # result = self._translateDescription(description,target=target,tag='purpose',**kwargs) return result - + #~ Translate: #~ ... def _translate_memberdef_function_detaileddescription( self, description, target=None, **kwargs ): return self._translateDescription(description,target=target,**kwargs) - + #~ Translate: #~ ... def _translate_memberdef_function_inbodydescription( self, description, target=None, **kwargs ): return self._translateDescription(description,target=target,**kwargs) - + #~ Translate: #~ ... def _translate_memberdef_function_param( self, param, target=None, **kwargs ): return self._translate_param(param,target=target,**kwargs) - + #~ Translate: #~ #~ ... @@ -654,7 +654,7 @@ class Doxygen2BoostBook: name=self._getChildData('name',root=memberdef))) data_member_type = data_member.appendChild(self._createNode('type')) self._translate_type(self._getChild('type',root=memberdef),target=data_member_type) - + #~ Translate: #~ #~ ... @@ -673,7 +673,7 @@ class Doxygen2BoostBook: for n in memberdef.childNodes: self._translateNode(memberdef,'enum',n,target=enum,scope=scope,**kwargs) return enum - + #~ Translate: #~ #~ @@ -696,7 +696,7 @@ class Doxygen2BoostBook: self._translateChildren(initializer, target=target.appendChild(self._createNode('default'))) return value - + #~ Translate: #~ #~ ... @@ -717,12 +717,12 @@ class Doxygen2BoostBook: if defval: self._translateChildren(self._getChild('defval',root=param),target=parameter) return parameter - + #~ Translate: #~ ... def _translate_ref( self, ref, **kwargs ): return self._translateNode(ref,ref.getAttribute('kindref')) - + #~ Translate: #~ ... #~ To: @@ -732,7 +732,7 @@ class Doxygen2BoostBook: classname = result.appendChild(self._createNode('classname')) self._translateChildren(ref,target=classname) return result - + #~ Translate: #~ ... #~ To: @@ -741,7 +741,7 @@ class Doxygen2BoostBook: result = self._createNode('link',linkend=ref.getAttribute('refid')) self._translateChildren(ref,target=result) return result - + #~ Translate: #~ ... 
def _translate_type( self, type, target=None, **kwargs ): @@ -758,7 +758,7 @@ class Doxygen2BoostBook: target.removeChild(target.firstChild) target.appendChild(self._createText('emphasis','unspecified')) return result - + def _getChild( self, tag = None, id = None, name = None, root = None ): if not root: root = self.boostbook.documentElement @@ -777,17 +777,17 @@ class Doxygen2BoostBook: #~ print '--|', n return n return None - + def _getChildData( self, tag, **kwargs ): return self._getData(self._getChild(tag,**kwargs),**kwargs) - + def _getData( self, node, **kwargs ): if node: text = self._getChild('#text',root=node) if text: return text.data.strip() return '' - + def _cppName( self, type ): parts = re.search('^([^<]+)[<]?(.*)[>]?$',type.strip().strip(':')) result = { @@ -806,7 +806,7 @@ class Doxygen2BoostBook: result['name'] = result['namespace'].pop()+'::'+result['name'] namespace = '::'.join(result['namespace']) return result - + def _createNode( self, tag, **kwargs ): result = self.boostbook.createElement(tag) for k in kwargs.keys(): @@ -816,7 +816,7 @@ class Doxygen2BoostBook: else: result.setAttribute(k,kwargs[k]) return result - + def _createText( self, tag, data, **kwargs ): result = self._createNode(tag,**kwargs) data = data.strip() @@ -827,7 +827,7 @@ class Doxygen2BoostBook: def main( xmldir=None, output=None, id=None, title=None, index=False ): #~ print '--- main: xmldir = %s, output = %s' % (xmldir,output) - + input = glob.glob( os.path.abspath( os.path.join( xmldir, "*.xml" ) ) ) input.sort translator = Doxygen2BoostBook(id=id, title=title, index=index) @@ -847,7 +847,7 @@ def main( xmldir=None, output=None, id=None, title=None, index=False ): for dox in decl_files: #~ print '--|',os.path.basename(dox) translator.addDox(xml.dom.minidom.parse(dox)) - + if output: output = open(output,'w') else: diff --git a/src/tools/doxygen.jam b/src/tools/doxygen.jam index 6a56ccdc2..f53a227c1 100644 --- a/src/tools/doxygen.jam +++ b/src/tools/doxygen.jam @@ -619,6 +619,7 @@ rule check-tools ( target ) rule doxygen ( target : sources + : requirements * : default-build * : usage-requirements * ) { + requirements += none ; freeze-config ; local project = [ project.current ] ; @@ -726,7 +727,7 @@ rule doxygen ( target : sources + : requirements * : default-build * : images-location = $(images-location)/ ; } - requirements += + requirements += $(target).doxygen-xml-images boost.doxygen.formuladir=$(images-location) ; } diff --git a/src/tools/gcc.jam b/src/tools/gcc.jam index db0453461..e94eced20 100644 --- a/src/tools/gcc.jam +++ b/src/tools/gcc.jam @@ -96,7 +96,8 @@ rule init ( version ? : command * : options * ) ; if $(tool-command) { - local tool-command-string = $(tool-command:J=" ") ; + local tool-command-string = \"$(tool-command)\" ; + tool-command-string = $(tool-command-string:J=" ") ; local tool-version = [ MATCH "^([0-9.]+)" : [ SHELL "$(tool-command-string) -dumpversion" ] ] ; if $(tool-version) != $(version) @@ -153,9 +154,11 @@ rule init ( version ? : command * : options * ) # Information about the gcc command... # The command. local command = $(tool-command) ; - # The 'command' variable can have multiple elements but when calling the - # SHELL builtin we need a single string. - local command-string = $(command:J=" ") ; + # The 'command' variable can have multiple elements but when calling the + # SHELL builtin we need a single string, and we need to quote elements + # with spaces. 
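[Editor's note: the comment above motivates the change that follows in gcc.jam: each element of a multi-element command is quoted before the list is joined into the single string passed to SHELL. A throwaway Python illustration of the effect, not b2 code:]

    def to_shell_string(command_parts):
        # Quote every element so paths containing spaces survive the join.
        return ' '.join('"%s"' % part for part in command_parts)

    print(to_shell_string(['C:/Program Files/mingw/bin/g++', '-dumpversion']))
    # prints: "C:/Program Files/mingw/bin/g++" "-dumpversion"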
+ local command-string = \"$(command)\" ; + command-string = $(command-string:J=" ") ; # The root directory of the tool install. local root = [ feature.get-values : $(options) ] ; # The bin directory where to find the command to execute. @@ -451,7 +454,7 @@ rule setup-address-model ( targets * : sources * : properties * ) else { local arch = [ feature.get-values architecture : $(properties) ] ; - if $(arch) != arm + if $(arch) = power || $(arch) = sparc || $(arch) = x86 { if $(model) = 32 { @@ -1138,8 +1141,7 @@ cpu-flags gcc OPTIONS : x86 : c3-2 : -march=c3-2 ; ## cpu-flags gcc OPTIONS : x86 : atom : -march=atom ; # Sparc -cpu-flags gcc OPTIONS : sparc : c3 : -mcpu=c3 : default ; -cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 ; +cpu-flags gcc OPTIONS : sparc : v7 : -mcpu=v7 : default ; cpu-flags gcc OPTIONS : sparc : cypress : -mcpu=cypress ; cpu-flags gcc OPTIONS : sparc : v8 : -mcpu=v8 ; cpu-flags gcc OPTIONS : sparc : supersparc : -mcpu=supersparc ; diff --git a/src/tools/gcc.py b/src/tools/gcc.py index a13ce7ad2..bc810e489 100644 --- a/src/tools/gcc.py +++ b/src/tools/gcc.py @@ -38,7 +38,7 @@ __debug = None def debug(): global __debug if __debug is None: - __debug = "--debug-configuration" in bjam.variable("ARGV") + __debug = "--debug-configuration" in bjam.variable("ARGV") return __debug feature.extend('toolset', ['gcc']) @@ -333,7 +333,7 @@ flags('gcc.compile', 'INCLUDES', [], ['']) engine = get_manager().engine() -engine.register_action('gcc.compile.c++.pch', +engine.register_action('gcc.compile.c++.pch', '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"') engine.register_action('gcc.compile.c.pch', @@ -360,7 +360,7 @@ def gcc_compile_c(targets, sources, properties): engine.set_target_variable (targets, 'LANG', '-x c') #} engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE')) - + engine.register_action( 'gcc.compile.c++', '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' + @@ -789,8 +789,7 @@ cpu_flags('gcc', 'OPTIONS', 'x86', 'atom', ['-march=atom']) # Sparc flags('gcc', 'OPTIONS', ['sparc/32'], ['-m32']) flags('gcc', 'OPTIONS', ['sparc/64'], ['-m64']) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'c3', ['-mcpu=c3'], default=True) -cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7']) +cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7'], default=True) cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress']) cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8']) cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc']) diff --git a/src/tools/intel-win.jam b/src/tools/intel-win.jam index bccdb1fa3..d50116796 100644 --- a/src/tools/intel-win.jam +++ b/src/tools/intel-win.jam @@ -474,6 +474,8 @@ if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] .iclvars-13.0-supported-vcs = "11.0 10.0 9.0" ; .iclvars-14.0-supported-vcs = "12.0 11.0 10.0 9.0" ; .iclvars-15.0-supported-vcs = "12.0 11.0 10.0 9.0" ; +.iclvars-16.0-supported-vcs = "14.0 12.0 11.0 10.0 9.0" ; +.iclvars-version-alias-vc14 = vs2015 ; .iclvars-version-alias-vc12 = vs2013 ; .iclvars-version-alias-vc11 = vs2012 ; .iclvars-version-alias-vc10 = vs2010 ; diff --git a/src/tools/make.py b/src/tools/make.py index 10baa1cb4..716a56119 100644 --- a/src/tools/make.py +++ b/src/tools/make.py @@ -1,12 +1,12 @@ # Status: ported. 
# Base revision: 64068 -# Copyright 2003 Dave Abrahams -# Copyright 2003 Douglas Gregor -# Copyright 2006 Rene Rivera -# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003 Dave Abrahams +# Copyright 2003 Douglas Gregor +# Copyright 2006 Rene Rivera +# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # This module defines the 'make' main target rule. @@ -18,13 +18,13 @@ import b2.build.property_set class MakeTarget(BasicTarget): - + def construct(self, name, source_targets, property_set): - action_name = property_set.get("")[0] + action_name = property_set.get("")[0] action = Action(get_manager(), source_targets, action_name[1:], property_set) target = FileTarget(self.name(), type.type(self.name()), - self.project(), action, exact=True) + self.project(), action, exact=True) return [ b2.build.property_set.empty(), [self.project().manager().virtual_targets().register(target)]] @@ -39,9 +39,9 @@ def make (target_name, sources, generating_rule, if not requirements: requirements = [] - + requirements.append("%s" % generating_rule) - + m = get_manager() targets = m.targets() project = m.projects().current() diff --git a/src/tools/mc.py b/src/tools/mc.py index c194acdff..9992c36c1 100644 --- a/src/tools/mc.py +++ b/src/tools/mc.py @@ -7,7 +7,7 @@ # Support for Microsoft message compiler tool. # Notes: -# - there's just message compiler tool, there's no tool for +# - there's just message compiler tool, there's no tool for # extracting message strings from sources # - This file allows to use Microsoft message compiler # with any toolset. In msvc.jam, there's more specific diff --git a/src/tools/message.py b/src/tools/message.py index cc0b946ff..5ec3efc76 100644 --- a/src/tools/message.py +++ b/src/tools/message.py @@ -26,7 +26,7 @@ class MessageTargetClass(targets.BasicTarget): if not self.built: for arg in self.args: if type(arg) == type([]): - arg = " ".join(arg) + arg = " ".join(arg) print arg self.built = True @@ -38,9 +38,9 @@ def message(name, *args): name = name[0] t = get_manager().targets() - + project = get_manager().projects().current() - + return t.main_target_alternative(MessageTargetClass(*((name, project) + args))) get_manager().projects().add_rule("message", message) diff --git a/src/tools/midl.py b/src/tools/midl.py index 86c1f34b6..51bc51feb 100644 --- a/src/tools/midl.py +++ b/src/tools/midl.py @@ -19,7 +19,7 @@ def init(): type.register('IDL', ['idl']) # A type library (.tlb) is generated by MIDL compiler and can be included -# to resources of an application (.rc). In order to be found by a resource +# to resources of an application (.rc). In order to be found by a resource # compiler its target type should be derived from 'H' - otherwise # the property '' will be ignored. 
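[Editor's note: the MSTYPELIB registration below feeds the MIDL scanner defined later in this file, whose 'import'/'importlib' regexes are built from a quoted-string pattern. A small runnable illustration of that pattern, reproduced in spirit with stand-in names:]

    import re

    re_strings = '[ \t]*"([^"]*)"([ \t]*,[ \t]*"([^"]*)")*[ \t]*'
    re_import = re.compile('import' + re_strings + '[ \t]*;')

    match = re_import.search('import "oaidl.idl", "ocidl.idl";')
    print(match.group(1))  # prints: oaidl.idl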
type.register('MSTYPELIB', ['tlb'], 'H') @@ -30,10 +30,10 @@ class MidlScanner(scanner.Scanner): scanner.Scanner.__init__(self) self.includes = includes - # List of quoted strings + # List of quoted strings re_strings = "[ \t]*\"([^\"]*)\"([ \t]*,[ \t]*\"([^\"]*)\")*[ \t]*" ; - # 'import' and 'importlib' directives + # 'import' and 'importlib' directives self.re_import = "import" + re_strings + "[ \t]*;" ; self.re_importlib = "importlib[ \t]*[(]" + re_strings + "[)][ \t]*;" ; @@ -58,8 +58,8 @@ class MidlScanner(scanner.Scanner): # Attach binding of including file to included targets. # When target is directly created from virtual target # this extra information is unnecessary. But in other - # cases, it allows to distinguish between two headers of the - # same name included from different places. + # cases, it allows to distinguish between two headers of the + # same name included from different places. g2 = g + "#" + b g = "<" + g + ">" @@ -79,7 +79,7 @@ class MidlScanner(scanner.Scanner): engine.set_target_variable(included_quoted, 'SEARCH', [utility.get_value(inc) for inc in self.includes]) engine.set_target_variable(imported , 'SEARCH', [utility.get_value(inc) for inc in self.includes]) engine.set_target_variable(imported_tlbs , 'SEARCH', [utility.get_value(inc) for inc in self.includes]) - + get_manager().scanners().propagate(type.get_scanner('CPP', PropertySet(self.includes)), included_angle + included_quoted) get_manager().scanners().propagate(self, imported) @@ -113,9 +113,9 @@ flags('midl.compile.idl', 'INCLUDES', [], ['']) builtin.register_c_compiler('midl.compile.idl', ['IDL'], ['MSTYPELIB', 'H', 'C(%_i)', 'C(%_proxy)', 'C(%_dlldata)'], []) -# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior +# MIDL does not always generate '%_proxy.c' and '%_dlldata.c'. This behavior # depends on contents of the source IDL file. Calling TOUCH_FILE below ensures -# that both files will be created so bjam will not try to recreate them +# that both files will be created so bjam will not try to recreate them # constantly. get_manager().engine().register_action( 'midl.compile.idl', @@ -130,5 +130,5 @@ $(MIDLFLAGS) /iid "$(<[3]:W)" /proxy "$(<[4]:W)" /dlldata "$(<[5]:W)")" -{touch} "$(<[4]:W)" +{touch} "$(<[4]:W)" {touch} "$(<[5]:W)"'''.format(touch=common.file_creation_command())) diff --git a/src/tools/msvc.jam b/src/tools/msvc.jam index 7fbe0f2e1..f3561db20 100644 --- a/src/tools/msvc.jam +++ b/src/tools/msvc.jam @@ -281,18 +281,7 @@ rule configure-version-specific ( toolset : version : conditions ) # dependencies to put there. toolset.flags $(toolset).link LINKFLAGS $(conditions) : /MANIFEST ; } - - # Starting with Visual Studio 2013 the CRT is split into a desktop and app dll. - #If targeting WinRT and 12.0 set lib path to link against app CRT. - if [ MATCH "(12)" : $(version) ] - { - local VCPath = [ path.parent [ path.make [ default-path $(version) ] ] ] ; - local storeLibPath = [ path.join [ path.join $(VCPath) "lib" ] "store" ] ; - toolset.flags $(toolset).link LINKPATH $(conditions)/store/$(.cpu-arch-i386) : [ path.native $(storeLibPath) ] ; - toolset.flags $(toolset).link LINKPATH $(conditions)/store/$(.cpu-arch-amd64) : [ path.native [ path.join $(storeLibPath) "amd64" ] ] ; - toolset.flags $(toolset).link LINKPATH $(conditions)/store/$(.cpu-arch-arm) : [ path.native [ path.join $(storeLibPath) "arm" ] ] ; - } - + toolset.pop-checking-for-flags-module ; } @@ -1061,6 +1050,14 @@ local rule configure-really ( version ? 
: options * ) { default-global-setup-options-amd64 = amd64 ; } + # When Boost.Build itself is running as a 32-bit process on 64-bit + # Windows, the above test will fail (since WOW64 simulates a 32-bit + # environment, including environment values). So check the WOW64 + # variable PROCESSOR_ARCHITEW6432 as well. + if [ MATCH ^(AMD64) : [ os.environ PROCESSOR_ARCHITEW6432 ] ] + { + default-global-setup-options-amd64 = amd64 ; + } # TODO: The same 'native compiler usage' should be implemented for # the Itanium platform by using the "ia64" parameter. For this # though we need someone with access to this platform who can find @@ -1164,7 +1161,14 @@ local rule configure-really ( version ? : options * ) { setup-script = $(setup-phone-$(c)) ; } - toolset.flags msvc.compile .CC $(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 -nologo ; + if $(api) = desktop + { + toolset.flags msvc.compile .CC $(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 -nologo ; + } + else + { + toolset.flags msvc.compile .CC $(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 /ZW /EHsc -nologo ; + } toolset.flags msvc.compile .ASM $(api)/$(cpu-conditions) : $(setup-script)$(cpu-assembler) -nologo ; toolset.flags msvc.link .LD $(api)/$(cpu-conditions) : $(setup-script)$(linker) /NOLOGO /INCREMENTAL:NO ; toolset.flags msvc.archive .LD $(api)/$(cpu-conditions) : $(setup-script)$(linker) /lib /NOLOGO ; @@ -1176,6 +1180,16 @@ local rule configure-really ( version ? : options * ) } } + # Starting with Visual Studio 2013 the CRT is split into a desktop and app dll. + # If targeting WinRT and 12.0 set lib path to link against app CRT. + if [ MATCH "(12)" : $(version) ] + { + local storeLibPath = [ path.join $(parent) "lib/store" ] ; + toolset.flags msvc.link LINKPATH $(conditions)/store/$(.cpu-arch-i386) : [ path.native $(storeLibPath) ] ; + toolset.flags msvc.link LINKPATH $(conditions)/store/$(.cpu-arch-amd64) : [ path.native [ path.join $(storeLibPath) "amd64" ] ] ; + toolset.flags msvc.link LINKPATH $(conditions)/store/$(.cpu-arch-arm) : [ path.native [ path.join $(storeLibPath) "arm" ] ] ; + } + # Set version-specific flags. configure-version-specific msvc : $(version) : $(conditions) ; } diff --git a/src/tools/msvc.py b/src/tools/msvc.py index 02dce9f9e..8cdc273e0 100644 --- a/src/tools/msvc.py +++ b/src/tools/msvc.py @@ -44,15 +44,15 @@ __debug = None def debug(): global __debug if __debug is None: - __debug = "--debug-configuration" in bjam.variable("ARGV") + __debug = "--debug-configuration" in bjam.variable("ARGV") return __debug -# It is not yet clear what to do with Cygwin on python port. +# It is not yet clear what to do with Cygwin on python port. def on_cygwin(): return False - + type.register('MANIFEST', ['manifest']) feature.feature('embed-manifest',['on','off'], ['incidental', 'propagated']) ; @@ -113,7 +113,7 @@ def init(version = None, command = None, options = None): options = to_seq(options) command = to_seq(command) - + if command: options.extend(""+cmd for cmd in command) configure(version,options) @@ -122,7 +122,7 @@ def configure(version=None, options=None): if version == "all": if options: raise RuntimeError("MSVC toolset configuration: options should be empty when '{}' is specified.".format(version)) - + # Configure (i.e. mark as used) all registered versions. 
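[Editor's note: configure() below marks registered toolset versions as used via the Configurations bookkeeping from common.py. A self-contained sketch of that register/use contract; the class here is a stand-in, not the real one.]

    class MiniConfigurations(object):
        # Stand-in for the register/use bookkeeping in common.py's Configurations.
        def __init__(self):
            self.all_ = set()
            self.used_ = set()

        def register(self, version):
            if version in self.used_:
                raise RuntimeError("configuration '%s' is in use" % version)
            added = version not in self.all_
            self.all_.add(version)
            return added

        def use(self, version):
            if version not in self.all_:
                raise RuntimeError("configuration '%s' is not known" % version)
            changed = version not in self.used_
            self.used_.add(version)
            return changed

    configs = MiniConfigurations()
    configs.register('14.0')
    configs.use('14.0')
    print(sorted(configs.all_))  # prints: ['14.0']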
all_versions = __versions.all() if not all_versions: @@ -145,7 +145,7 @@ def configure(version=None, options=None): def extend_conditions(conditions,exts): return [ cond + '/' + ext for cond in conditions for ext in exts ] - + def configure_version_specific(toolset_arg, version, conditions): # Starting with versions 7.0, the msvc compiler have the /Zc:forScope and # /Zc:wchar_t options that improve C++ standard conformance, but those @@ -204,7 +204,7 @@ def configure_version_specific(toolset_arg, version, conditions): toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_amd64), ['/MACHINE:X64']) toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_i386), ['/MACHINE:X86']) toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', extend_conditions(conditions, __cpu_arch_ia64), ['/MACHINE:IA64']) - + # Make sure that manifest will be generated even if there is no # dependencies to put there. toolset.flags('{}.link'.format(toolset_arg), 'LINKFLAGS', conditions, ['/MANIFEST']) @@ -216,12 +216,12 @@ def configure_version_specific(toolset_arg, version, conditions): def register_toolset(): if not 'msvc' in feature.values('toolset'): register_toolset_really() - - + + engine = get_manager().engine() -# this rule sets up the pdb file that will be used when generating static -# libraries and the debug-store option is database, so that the compiler +# this rule sets up the pdb file that will be used when generating static +# libraries and the debug-store option is database, so that the compiler # puts all debug info into a single .pdb file named after the library # # Poking at source targets this way is probably not clean, but it's the @@ -252,7 +252,7 @@ $(LIBRARIES_MENTIONED_BY_FILE) "$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" "$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"'''.format(rm=common.rm_command()), function=archive) - + # For the assembler the following options are turned on by default: # # -Zp4 align structures to 4 bytes @@ -319,10 +319,10 @@ def compile_cpp_pch(targets,sources=[],properties=None): # # 1. PDB_CFLAG is only set for on/database, ensuring that the /Fd flag is dropped if PDB_CFLAG is empty # -# 2. When compiling executables's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++. +# 2. When compiling executables's source files, PDB_NAME is set on a per-source file basis by rule compile-c-c++. # The linker will pull these into the executable's PDB # -# 3. When compiling library's source files, PDB_NAME is updated to .pdb for each source file by rule archive, +# 3. When compiling library's source files, PDB_NAME is updated to .pdb for each source file by rule archive, # as in this case the compiler must be used to create a single PDB for our library. # @@ -330,7 +330,7 @@ class SetupAction: def __init__(self, setup_func, function): self.setup_func = setup_func self.function = function - + def __call__(self, targets, sources, property_set): assert(callable(self.setup_func)) # This can modify sources. 
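[Editor's note: a hypothetical usage of the setup-function pattern defined at this point in msvc.py: the setup callable may extend the source list, for example with a precompiled header, and returns the name of the concrete action to dispatch, much like setup_preprocess_c_cpp_action in the next hunk. Everything below is illustrative, not b2 API.]

    def setup_compile(targets, sources, properties):
        # May modify 'sources' in place before the real action runs.
        sources.append('stdafx.pch')
        return 'compile-c-c++'

    class MiniSetupAction(object):
        def __init__(self, setup_func):
            self.setup_func = setup_func

        def __call__(self, targets, sources, properties):
            action_name = self.setup_func(targets, sources, properties)
            print('dispatching %s with sources %s' % (action_name, sources))

    MiniSetupAction(setup_compile)(['a.obj'], ['a.cpp'], {})
    # prints: dispatching compile-c-c++ with sources ['a.cpp', 'stdafx.pch']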
@@ -378,7 +378,7 @@ def setup_preprocess_c_cpp_action(targets, sources, properties): sources += bjam.call('get-target-variable',targets,'PCH_FILE') sources += bjam.call('get-target-variable',targets,'PCH_HEADER') return 'preprocess-c-c++' - + register_setup_action( 'msvc.preprocess.c', setup_preprocess_c_cpp_action, @@ -436,7 +436,7 @@ register_setup_action( engine.register_action( 'msvc.compile.idl', '''$(.IDL) /nologo @"@($(<[1]:W).rsp:E= -"$(>:W)" +"$(>:W)" -D$(DEFINES) "-I$(INCLUDES:W)" -U$(UNDEFS) @@ -506,7 +506,7 @@ $(LIBRARIES) if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%''', function=link_dll, bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE']) - + engine.register_action( 'msvc.manifest.dll', '''if exist "$(<[1]).manifest" ( @@ -540,7 +540,7 @@ $(LIBRARIES) "$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"''', function=link_dll, bound_list=['DEF_FILE','LIBRARIES_MENTIONED_BY_FILE']) - + engine.register_action( 'msvc.manifest.dll', '''if test -e "$(<[1]).manifest"; then @@ -566,7 +566,7 @@ class MsvcPchGenerator(pch.PchGenerator): pch_header = s elif type.is_derived(s.type(), 'CPP') or type.is_derived(s.type(), 'C'): pch_source = s - + if not pch_header: raise RuntimeError( "can not build pch without pch-header" ) @@ -589,7 +589,7 @@ class MsvcPchGenerator(pch.PchGenerator): result_props.append(Property('pch-header', pch_header)) if pch_file: result_props.append(Property('pch-file', pch_file)) - + return property_set.PropertySet(result_props), generated @@ -635,7 +635,7 @@ def configure_really(version=None, options=[]): # Take the first registered (i.e. auto-detected) version. version = __versions.first() v = version - + # Note: 'version' can still be empty at this point if no versions have # been auto-detected. if not version: @@ -665,12 +665,12 @@ def configure_really(version=None, options=[]): conditions = common.check_init_parameters('msvc', None, ('version', v)) __versions.set(version, 'conditions', conditions) command = feature.get_values('', options) - + # If version is specified, we try to search first in default paths, and # only then in PATH. command = common.get_invocation_command('msvc', 'cl.exe', command, default_paths(version)) common.handle_options('msvc', conditions, command, options) - + if not version: # Even if version is not explicitly specified, try to detect the # version from the path. @@ -718,7 +718,7 @@ def configure_really(version=None, options=[]): # MSVC 7.1 compiler even though it thinks it is using the msvc-9.0 # toolset version. command = common.get_absolute_tool_path(command) - + if command: parent = os.path.dirname(os.path.normpath(command)) # Setup will be used if the command name has been specified. If @@ -834,7 +834,7 @@ def configure_really(version=None, options=[]): default_assembler_ia64 = 'ias' assembler = feature.get_values('',options) - + idl_compiler = feature.get_values('',options) if not idl_compiler: idl_compiler = 'midl' @@ -851,7 +851,7 @@ def configure_really(version=None, options=[]): for c in cpu: cpu_conditions = [ condition + '/' + arch for arch in globals()['__cpu_arch_{}'.format(c)] for condition in conditions ] - + setup_script = setup_scripts.get(c, '') if debug(): @@ -934,7 +934,7 @@ class MsvcLinkingGenerator(builtin.LinkingGenerator): if result: name_main = result[0].name() action = result[0].action() - + if prop_set.get('') == 'on': # We force exact name on PDB. The reason is tagging -- the tag rule may # reasonably special case some target types, like SHARED_LIB. 
The tag rule @@ -947,7 +947,7 @@ class MsvcLinkingGenerator(builtin.LinkingGenerator): action.replace_targets(target,registered_target) result.append(registered_target) if prop_set.get('') == 'off': - # Manifest is evil target. It has .manifest appened to the name of + # Manifest is evil target. It has .manifest appened to the name of # main target, including extension. E.g. a.exe.manifest. We use 'exact' # name because to achieve this effect. target = FileTarget(name_main+'.manifest', 'MANIFEST', project, action, True) @@ -1151,14 +1151,14 @@ class MSVCConfigurations(Configurations): def first(self): return self.first_ - + # List of all registered configurations. __versions = MSVCConfigurations() # Supported CPU architectures. __cpu_arch_i386 = [ - '/', + '/', '/32', 'x86/', 'x86/32'] diff --git a/src/tools/package.jam b/src/tools/package.jam index 198c22315..c703580fc 100644 --- a/src/tools/package.jam +++ b/src/tools/package.jam @@ -71,7 +71,7 @@ rule install ( name package-name ? : requirements * : binaries * : libraries * : # First, figure out all locations. Use the default if no prefix option # given. - local prefix = [ get-prefix $(name) : $(requirements) ] ; + local prefix = [ get-prefix $(package-name) : $(requirements) ] ; # Architecture dependent files. local exec-locate = [ option.get exec-prefix : $(prefix) ] ; diff --git a/src/tools/package.py b/src/tools/package.py index aa081b4f4..a3b1baef4 100644 --- a/src/tools/package.py +++ b/src/tools/package.py @@ -68,14 +68,14 @@ def install(name, package_name=None, requirements=[], binaries=[], libraries=[], option.set("bindir", None) option.set("libdir", None) option.set("includedir", None) - + # If is not specified, all headers are installed to # prefix/include, no matter what their relative path is. Sometimes that is # what is needed. install_source_root = property.select('install-source-root', requirements) if install_source_root: requirements = property.change(requirements, 'install-source-root', None) - + install_header_subdir = property.select('install-header-subdir', requirements) if install_header_subdir: install_header_subdir = ungrist(install_header_subdir[0]) @@ -98,16 +98,16 @@ def install(name, package_name=None, requirements=[], binaries=[], libraries=[], include_locate = option.get("includedir", os.path.join(prefix, "include")) stage.install(name + "-bin", binaries, requirements + ["" + bin_locate]) - + alias(name + "-lib", [name + "-lib-shared", name + "-lib-static"]) - + # Since the install location of shared libraries differs on universe # and cygwin, use target alternatives to make different targets. # We should have used indirection conditioanl requirements, but it's # awkward to pass bin-locate and lib-locate from there to another rule. alias(name + "-lib-shared", [name + "-lib-shared-universe"]) alias(name + "-lib-shared", [name + "-lib-shared-cygwin"], ["cygwin"]) - + # For shared libraries, we install both explicitly specified one and the # shared libraries that the installed executables depend on. 
stage.install(name + "-lib-shared-universe", binaries + libraries, @@ -141,7 +141,7 @@ def install_data(target_name, package_name, data, requirements): # If --prefix is explicitly specified on the command line, # then we need wipe away any settings of datarootdir option.set("datarootdir", None) - + prefix = get_prefix(package_name, requirements) datadir = option.get("datarootdir", os.path.join(prefix, "share")) @@ -156,7 +156,7 @@ def get_prefix(package_name, requirements): if specified: specified = ungrist(specified[0]) prefix = option.get("prefix", specified) - requirements = property.change(requirements, "install-default-prefix", None) + requirements = property.change(requirements, "install-default-prefix", None) # Or some likely defaults if neither is given. if not prefix: if os.name == "nt": diff --git a/src/tools/python.jam b/src/tools/python.jam index 1a41a1e86..cb4a4c3e0 100644 --- a/src/tools/python.jam +++ b/src/tools/python.jam @@ -736,7 +736,7 @@ local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? : local fallback-version ; # Anything left to find or check? - if ! ( $(interpreter-cmd) && $(includes) && $(libraries) ) + if ! ( $(interpreter-cmd) && $(version) && $(includes) && $(libraries) ) { # Values to be extracted from python's sys module. These will be set by # the probe rule, above, using Jam's dynamic scoping. @@ -962,16 +962,18 @@ local rule configure ( version ? : cmd-or-prefix ? : includes * : libraries ? : # symbols. If we linked to libpython, we would get duplicate symbols. So # declare two targets -- one for building extensions and another for # embedding. - # - # Unlike most *nix systems, Mac OS X's linker does not permit undefined - # symbols when linking a shared library. So, we still need to link against - # the Python framework, even when building extensions. Note that framework - # builds of Python always use shared libraries, so we do not need to worry - # about duplicate Python symbols. - if $(target-os) in windows cygwin darwin + if $(target-os) in windows cygwin { alias python_for_extensions : python : $(target-requirements) ; } + else if $(target-os) = darwin { + alias python_for_extensions + : + : $(target-requirements) + : + : $(usage-requirements) "-undefined dynamic_lookup" + ; + } # On AIX we need Python extensions and Boost.Python to import symbols from # the Python interpreter. Dynamic libraries opened with dlopen() do not # inherit the symbols from the Python interpreter. diff --git a/src/tools/qt5.jam b/src/tools/qt5.jam index 46a753019..6eac4ad3f 100644 --- a/src/tools/qt5.jam +++ b/src/tools/qt5.jam @@ -435,6 +435,8 @@ rule init ( prefix : version ? : condition * : namespace ? : infix ? : full_bin add-shared-library QtPrintSupport : QtGui : QT_PRINTSUPPORT_LIB : $(target-requirements) ; add-shared-library QtConcurrent : QtCore : QT_CONCURRENT_LIB : $(target-requirements) ; + add-shared-library QtPositioning : QtCore : QT_POSITIONING_LIB : $(target-requirements) ; + add-shared-library QtOpenGL : QtGui : QT_OPENGL_LIB : $(target-requirements) ; add-shared-library QtSvg : QtXml QtOpenGL : QT_SVG_LIB : $(target-requirements) ; @@ -473,6 +475,18 @@ rule init ( prefix : version ? : condition * : namespace ? : infix ? 
: full_bin add-shared-library QtQuickParticles : QtQml : : $(target-requirements) ; add-shared-library QtQuickTest : QtQml : : $(target-requirements) ; + # QtLocation (since 5.4) + add-shared-library QtLocation : QtQuick QtPositioning : QT_LOCATION_LIB : $(target-requirements) ; + + # Qt3d libraries (since 5.6) + add-shared-library Qt3DCore : QtGui : QT_3DCORE_LIB : $(target-requirements) ; + add-shared-library Qt3DRender : Qt3DCore QtConcurrent : QT_3DRENDER_LIB : $(target-requirements) ; + add-shared-library Qt3DLogic : Qt3DCore : QT_3DLOGIC_LIB : $(target-requirements) ; + add-shared-library Qt3DInput : Qt3DRender : QT_3DINPUT_LIB : $(target-requirements) ; + + # QtCharts (since 5.7) + add-shared-library QtCharts : QtWidgets : QT_CHARTS_LIB : $(target-requirements) ; + # Regular expression support add-shared-library QtV8 : QtCore : : $(target-requirements) ; diff --git a/src/tools/rc.py b/src/tools/rc.py index d026480d8..5bdebb9be 100644 --- a/src/tools/rc.py +++ b/src/tools/rc.py @@ -5,7 +5,7 @@ # distribute this software is granted provided this copyright notice appears in # all copies. This software is provided "as is" without express or implied # warranty, and with no claim as to its suitability for any purpose. -# +# # Copyright (c) 2006 Rene Rivera. # # Copyright (c) 2008 Steven Watanabe @@ -39,7 +39,7 @@ __debug = None def debug(): global __debug if __debug is None: - __debug = "--debug-configuration" in bjam.variable("ARGV") + __debug = "--debug-configuration" in bjam.variable("ARGV") return __debug type.register('RC', ['rc']) @@ -51,10 +51,10 @@ def configure (command = None, condition = None, options = None): """ Configures a new resource compilation command specific to a condition, usually a toolset selection condition. The possible options are: - + * (rc|windres) - Indicates the type of options the command accepts. - + Even though the arguments are all optional, only when a command, condition, and at minimum the rc-type option are given will the command be configured. This is so that callers don't have to check auto-configuration values @@ -79,11 +79,11 @@ engine = get_manager().engine() class RCAction: """Class representing bjam action defined from Python. The function must register the action to execute.""" - + def __init__(self, action_name, function): self.action_name = action_name self.function = function - + def __call__(self, targets, sources, property_set): if self.function: self.function(targets, sources, property_set) @@ -132,7 +132,7 @@ __angle_include_re = "#include[ ]*<([^<]+)>" # Register scanner for resources class ResScanner(scanner.Scanner): - + def __init__(self, includes): scanner.__init__ ; self.includes = includes @@ -149,7 +149,7 @@ class ResScanner(scanner.Scanner): "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\ "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4]) - # Icons and other includes may referenced as + # Icons and other includes may referenced as # # IDR_MAINFRAME ICON "res\\icon.ico" # @@ -163,19 +163,19 @@ class ResScanner(scanner.Scanner): # Attach binding of including file to included targets. # When target is directly created from virtual target # this extra information is unnecessary. But in other - # cases, it allows to distinguish between two headers of the - # same name included from different places. + # cases, it allows to distinguish between two headers of the + # same name included from different places. 
# We don't need this extra information for angle includes, # since they should not depend on including file (we can't # get literal "." in include path). g2 = g + "#" + b - + g = "<" + g + ">" g2 = "<" + g2 + ">" angle = [g + x for x in angle] quoted = [g2 + x for x in quoted] res = [g2 + x for x in res] - + all = angle + quoted bjam.call('mark-included', target, all) @@ -187,7 +187,7 @@ class ResScanner(scanner.Scanner): engine.set_target_variable(angle, 'SEARCH', [utility.get_value(inc) for inc in self.includes]) engine.set_target_variable(quoted, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes]) engine.set_target_variable(res, 'SEARCH', [b + utility.get_value(inc) for inc in self.includes]) - + # Just propagate current scanner to includes, in a hope # that includes do not change scanners. get_manager().scanners().propagate(self, angle + quoted) diff --git a/src/tools/stage.py b/src/tools/stage.py index 8eda4e258..76b10f65a 100644 --- a/src/tools/stage.py +++ b/src/tools/stage.py @@ -54,7 +54,7 @@ class InstallTargetClass(targets.BasicTarget): if a: ps = a.properties() properties = ps.all() - + # Unless true is in properties, which can happen # only if the user has explicitly requested it, nuke all # properties. @@ -80,7 +80,7 @@ class InstallTargetClass(targets.BasicTarget): properties.extend(build_ps.get_properties('dependency')) properties.extend(build_ps.get_properties('location')) - + properties.extend(build_ps.get_properties('install-no-version-symlinks')) @@ -93,7 +93,7 @@ class InstallTargetClass(targets.BasicTarget): properties.append(property.Property(p.feature(), os.path.abspath(p.value()))) return property_set.create(properties) - + def construct(self, name, source_targets, ps): @@ -125,7 +125,7 @@ class InstallTargetClass(targets.BasicTarget): new_ps, [i]) assert isinstance(r, property_set.PropertySet) staged_targets.extend(targets) - + else: staged_targets.append(copy_file(self.project(), ename, i, new_ps)) @@ -168,18 +168,18 @@ class InstallTargetClass(targets.BasicTarget): # CONSIDER: figure out why we can not use virtual-target.traverse here. # def collect_targets(self, targets): - + s = [t.creating_subvariant() for t in targets] s = unique(filter(lambda l: l != None,s)) - + result = set(targets) for i in s: i.all_referenced_targets(result) - + result2 = [] for r in result: if isinstance(r, property.Property): - + if r.feature().name() != 'use': result2.append(r.value()) else: @@ -284,7 +284,7 @@ class InstalledSharedLibGenerator(generators.Generator): else: need_relink = ps.get('dll-path') != source.action().properties().get('dll-path') - + if need_relink: # Rpath changed, need to relink. copied = relink_file(project, source, ps) @@ -308,13 +308,13 @@ class InstalledSharedLibGenerator(generators.Generator): # compatibility guarantees. If not, it is possible to skip those # symlinks. if ps.get('install-no-version-symlinks') != ['on']: - + result.append(symlink(m.group(1) + '.' + m.group(2), project, copied, ps)) result.append(symlink(m.group(1) + '.' + m.group(2) + '.' 
+ m.group(3), project, copied, ps)) return result - + generators.register(InstalledSharedLibGenerator()) @@ -335,9 +335,9 @@ def install(name, sources, requirements=[], default_build=[], usage_requirements from b2.manager import get_manager t = get_manager().targets() - + project = get_manager().projects().current() - + return t.main_target_alternative( InstallTargetClass(name, project, t.main_target_sources(sources, name), diff --git a/src/tools/symlink.py b/src/tools/symlink.py index 6345ded6d..ed5388977 100644 --- a/src/tools/symlink.py +++ b/src/tools/symlink.py @@ -1,11 +1,11 @@ # Status: ported. # Base revision: 64488. -# Copyright 2003 Dave Abrahams -# Copyright 2002, 2003 Rene Rivera -# Copyright 2002, 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003 Dave Abrahams +# Copyright 2002, 2003 Rene Rivera +# Copyright 2002, 2003, 2004, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # Defines the "symlink" special target. 'symlink' targets make symbolic links # to the sources. @@ -30,18 +30,18 @@ class SymlinkTarget(targets.BasicTarget): _count = 0 def __init__(self, project, targets, sources): - + # Generate a fake name for now. Need unnamed targets eventually. fake_name = "symlink#%s" % SymlinkTarget._count SymlinkTarget._count = SymlinkTarget._count + 1 b2.build.targets.BasicTarget.__init__(self, fake_name, project, sources) - + # Remember the targets to map the sources onto. Pad or truncate # to fit the sources given. assert len(targets) <= len(sources) self.targets = targets[:] + sources[len(targets):] - + # The virtual targets corresponding to the given targets. self.virtual_targets = [] @@ -51,7 +51,7 @@ class SymlinkTarget(targets.BasicTarget): s = self.targets[i] a = virtual_target.Action(self.manager(), [t], "symlink.ln", ps) vt = virtual_target.FileTarget(os.path.basename(s), t.type(), self.project(), a) - + # Place the symlink in the directory relative to the project # location, instead of placing it in the build directory. if not ps.get('symlink-location') == "project-relative": @@ -80,11 +80,11 @@ class SymlinkTarget(targets.BasicTarget): def symlink(targets, sources): from b2.manager import get_manager - t = get_manager().targets() + t = get_manager().targets() p = get_manager().projects().current() return t.main_target_alternative( - SymlinkTarget(p, targets, + SymlinkTarget(p, targets, # Note: inline targets are not supported for symlink, intentionally, # since it's used to linking existing non-local targets. sources)) diff --git a/src/tools/testing-aux.jam b/src/tools/testing-aux.jam index 5c7a267da..a264e4c35 100644 --- a/src/tools/testing-aux.jam +++ b/src/tools/testing-aux.jam @@ -52,6 +52,21 @@ actions (failed-as-expected) echo failed as expected > "$(<)" } + +if [ os.name ] = VMS +{ + actions **passed** + { + PIPE WRITE SYS$OUTPUT "passed" > $(<:W) + } + + actions (failed-as-expected) + { + PIPE WRITE SYS$OUTPUT "failed as expected" > $(<:W) + } +} + + # Runs executable 'sources' and stores stdout in file 'target'. Unless # --preserve-test-targets command line option has been specified, removes the # executable. 
The 'target-to-remove' parameter controls what should be removed: @@ -98,6 +113,12 @@ rule capture-output ( target : source : properties * : targets-to-remove * ) # fails, but it is not likely to happen. RmTemps $(target) : $(targets-to-remove) ; } + + if ! [ feature.get-values testing.launcher : $(properties) ] + { + ## On VMS set default launcher to MCR + if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; } + } } @@ -106,26 +127,53 @@ if [ os.name ] = NT .STATUS = %status% ; .SET_STATUS = "set status=%ERRORLEVEL%" ; .RUN_OUTPUT_NL = "echo." ; - .STATUS_0 = "%status% EQU 0 (" ; - .STATUS_NOT_0 = "%status% NEQ 0 (" ; - .VERBOSE = "%verbose% EQU 1 (" ; + .THEN = "(" ; + .EXIT_SUCCESS = "0" ; + .STATUS_0 = "%status% EQU 0 $(.THEN)" ; + .STATUS_NOT_0 = "%status% NEQ 0 $(.THEN)" ; + .VERBOSE = "%verbose% EQU 1 $(.THEN)" ; .ENDIF = ")" ; .SHELL_SET = "set " ; .CATENATE = type ; .CP = copy ; + .NULLIN = ; +} +else if [ os.name ] = VMS +{ + local nl = " +" ; + + .STATUS = "''status'" ; + .SET_STATUS = "status=$STATUS" ; + .SAY = "pipe write sys$output" ; ## not really echo + .RUN_OUTPUT_NL = "$(.SAY) \"\"" ; + .THEN = "$(nl)then" ; + .EXIT_SUCCESS = "1" ; + .SUCCESS = "status .eq. $(.EXIT_SUCCESS) $(.THEN)" ; + .STATUS_0 = "status .eq. 0 $(.THEN)" ; + .STATUS_NOT_0 = "status .ne. 0 $(.THEN)" ; + .VERBOSE = "verbose .eq. 1 $(.THEN)" ; + .ENDIF = "endif" ; + .SHELL_SET = "" ; + .CATENATE = type ; + .CP = copy ; + .NULLIN = ; } else { .STATUS = "$status" ; .SET_STATUS = "status=$?" ; .RUN_OUTPUT_NL = "echo" ; - .STATUS_0 = "test $status -eq 0 ; then" ; - .STATUS_NOT_0 = "test $status -ne 0 ; then" ; - .VERBOSE = "test $verbose -eq 1 ; then" ; + .THEN = "; then" ; + .EXIT_SUCCESS = "0" ; + .STATUS_0 = "test $status -eq 0 $(.THEN)" ; + .STATUS_NOT_0 = "test $status -ne 0 $(.THEN)" ; + .VERBOSE = "test $verbose -eq 1 $(.THEN)" ; .ENDIF = "fi" ; .SHELL_SET = "" ; .CATENATE = cat ; .CP = cp ; + .NULLIN = "<" "/dev/null" ; } @@ -170,14 +218,76 @@ actions quietly updated ignore piecemeal together RmTemps } +if [ os.name ] = VMS +{ + actions capture-output bind INPUT_FILES output-file + { + $(PATH_SETUP) + !! Execute twice - first for status, second for output + set noon + pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) 2>NL: >NL: + $(.SET_STATUS) + pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) | type sys$input /out=$(output-file:W) + set on + !! Harmonize VMS success status with POSIX + if $(.SUCCESS) + $(.SHELL_SET)status="0" + $(.ENDIF) + $(.RUN_OUTPUT_NL) | append /new sys$input $(output-file:W) + $(.SAY) "EXIT STATUS: $(.STATUS)" | append /new sys$input $(output-file:W) + if $(.STATUS_0) + $(.CP) $(output-file:W) $(<:W) + $(.ENDIF) + $(.SHELL_SET)verbose=$(.VERBOSE_TEST) + if $(.STATUS_NOT_0) + $(.SHELL_SET)verbose=1 + $(.ENDIF) + if $(.VERBOSE) + $(.SAY) "====== BEGIN OUTPUT ======" + $(.CATENATE) $(output-file:W) + $(.SAY) "====== END OUTPUT ======" + $(.ENDIF) + !! Harmonize VMS success status with POSIX on exit + if $(.STATUS_0) + $(.SHELL_SET)status="$(.EXIT_SUCCESS)" + $(.ENDIF) + exit "$(.STATUS)" + } + + actions quietly updated ignore piecemeal together RmTemps + { + $(.RM) $(>:WJ=;*,);* + } +} + + .MAKE_FILE = [ common.file-creation-command ] ; + +rule unit-test ( target : source : properties * ) +{ + if ! 
[ feature.get-values testing.launcher : $(properties) ] + { + ## On VMS set default launcher to MCR + if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; } + } +} + actions unit-test { $(PATH_SETUP) $(LAUNCHER) "$(>)" $(ARGS) && $(.MAKE_FILE) "$(<)" } +if [ os.name ] = VMS +{ + actions unit-test + { + $(PATH_SETUP) + pipe $(LAUNCHER) $(>:W) $(ARGS) && $(.MAKE_FILE) $(<:W) + } +} + # Note that this rule may be called multiple times for a single target in case # there are multiple actions operating on the same target in sequence. One such # example are msvc exe targets first created by a linker action and then updated @@ -218,3 +328,15 @@ actions time echo user: $(USER_TIME_SECONDS) > "$(<)" echo system: $(SYSTEM_TIME_SECONDS) >> "$(<)" } + +if [ os.name ] = VMS +{ + actions time + { + WRITE SYS$OUTPUT "user: ", "$(USER_TIME)" + WRITE SYS$OUTPUT "system: ", "(SYSTEM_TIME)" + + PIPE WRITE SYS$OUTPUT "user: ", "$(USER_TIME_SECONDS)" | TYPE SYS$INPUT /OUT=$(<:W) + PIPE WRITE SYS$OUTPUT "system: ", "$(SYSTEM_TIME_SECONDS)" | APPEND /NEW SYS$INPUT $(<:W) + } +} diff --git a/src/tools/testing.jam b/src/tools/testing.jam index adb3d3e7e..7fbb692a1 100644 --- a/src/tools/testing.jam +++ b/src/tools/testing.jam @@ -83,6 +83,14 @@ type.register LINK : : TEST ; type.register UNIT_TEST : passed : TEST ; +# Suffix to denote test target directory +# +.TEST-DIR-SUFFIX = ".test" ; +if [ os.name ] = VMS +{ + .TEST-DIR-SUFFIX = "$test" ; +} + # Declare the rules which create main targets. While the 'type' module already # creates rules with the same names for us, we need extra convenience: default # name of main target, so write our own versions. @@ -104,11 +112,11 @@ rule make-test ( target-type : sources + : requirements * : target-name ? ) local project = [ project.current ] ; # The forces the build system for generate paths in the - # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow - # post-processing tools to work. + # form '$build_dir/array1$(.TEST-DIR-SUFFIX)/gcc/debug'. This is necessary + # to allow post-processing tools to work. local t = [ targets.create-typed-target [ type.type-from-rule-name $(target-type) ] : $(project) : $(real-name) : $(sources) : - $(requirements) $(real-name).test ] ; + $(requirements) $(real-name)$(.TEST-DIR-SUFFIX) ] ; # The alias to the real target, per period replacement above. if $(real-name) != $(target-name) @@ -266,8 +274,8 @@ rule dump-test ( target ) } } - local target-name = [ $(project).get location ] // [ $(target).name ] .test - ; + local target-name = + [ $(project).get location ] // [ $(target).name ] $(.TEST-DIR-SUFFIX) ; target-name = $(target-name:J=) ; local r = [ $(target).requirements ] ; @@ -366,6 +374,7 @@ rule **passed** } + # Used to create test files signifying passed tests. # actions **passed** @@ -373,7 +382,6 @@ actions **passed** echo passed > "$(<)" } - # Used to create replacement object files that do not get created during tests # that are expected to fail. 
# @@ -383,6 +391,19 @@ actions (failed-as-expected) } +if [ os.name ] = VMS +{ + actions **passed** + { + PIPE WRITE SYS$OUTPUT "passed" > $(<:W) + } + + actions (failed-as-expected) + { + PIPE WRITE SYS$OUTPUT "failed as expected" > $(<:W) + } +} + rule run-path-setup ( target : source : properties * ) { # For testing, we need to make sure that all dynamic libraries needed by the @@ -395,7 +416,12 @@ rule run-path-setup ( target : source : properties * ) dll-paths = [ sequence.unique $(dll-paths) ] ; if $(dll-paths) { - dll-paths = [ sequence.transform path.native : $(dll-paths) ] ; + translate-to-os = path.native ; + if [ os.name ] = VMS + { + translate-to-os = path.to-VMS ; + } + dll-paths = [ sequence.transform $(translate-to-os) : $(dll-paths) ] ; PATH_SETUP on $(target) = [ common.prepend-path-variable-command [ os.shared-library-path-variable ] : $(dll-paths) ] ; } @@ -456,7 +482,7 @@ rule capture-output ( target : source : properties * : targets-to-remove * ) { DISABLE_TEST_EXECUTION on $(target) = 1 ; } - + if [ feature.get-values preserve-test-targets : $(properties) ] = off || $(.preserve-test-targets) = off { @@ -466,6 +492,12 @@ rule capture-output ( target : source : properties * : targets-to-remove * ) rmtemp-all-sources $(to-remove) ; } } + + if ! [ feature.get-values testing.launcher : $(properties) ] + { + ## On VMS set default launcher to MCR + if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; } + } } .types-to-remove = EXE OBJ ; @@ -530,23 +562,48 @@ if [ os.name ] = NT .STATUS = %status% ; .SET_STATUS = "set status=%ERRORLEVEL%" ; .RUN_OUTPUT_NL = "echo." ; - .STATUS_0 = "%status% EQU 0 (" ; - .STATUS_NOT_0 = "%status% NEQ 0 (" ; - .VERBOSE = "%verbose% EQU 1 (" ; + .THEN = "(" ; + .EXIT_SUCCESS = "0" ; + .STATUS_0 = "%status% EQU 0 $(.THEN)" ; + .STATUS_NOT_0 = "%status% NEQ 0 $(.THEN)" ; + .VERBOSE = "%verbose% EQU 1 $(.THEN)" ; .ENDIF = ")" ; .SHELL_SET = "set " ; .CATENATE = type ; .CP = copy ; .NULLIN = ; } +else if [ os.name ] = VMS +{ + local nl = " +" ; + + .STATUS = "''status'" ; + .SET_STATUS = "status=$STATUS" ; + .SAY = "pipe write sys$output" ; ## not really echo + .RUN_OUTPUT_NL = "$(.SAY) \"\"" ; + .THEN = "$(nl)then" ; + .EXIT_SUCCESS = "1" ; + .SUCCESS = "status .eq. $(.EXIT_SUCCESS) $(.THEN)" ; + .STATUS_0 = "status .eq. 0 $(.THEN)" ; + .STATUS_NOT_0 = "status .ne. 0 $(.THEN)" ; + .VERBOSE = "verbose .eq. 1 $(.THEN)" ; + .ENDIF = "endif" ; + .SHELL_SET = "" ; + .CATENATE = type ; + .CP = copy ; + .NULLIN = ; +} else { .STATUS = "$status" ; .SET_STATUS = "status=$?" 
; .RUN_OUTPUT_NL = "echo" ; - .STATUS_0 = "test $status -eq 0 ; then" ; - .STATUS_NOT_0 = "test $status -ne 0 ; then" ; - .VERBOSE = "test $verbose -eq 1 ; then" ; + .THEN = "; then" ; + .EXIT_SUCCESS = "0" ; + .STATUS_0 = "test $status -eq 0 $(.THEN)" ; + .STATUS_NOT_0 = "test $status -ne 0 $(.THEN)" ; + .VERBOSE = "test $verbose -eq 1 $(.THEN)" ; .ENDIF = "fi" ; .SHELL_SET = "" ; .CATENATE = cat ; @@ -571,7 +628,7 @@ actions capture-output bind INPUT_FILES output-file $(.SHELL_SET)status=$(DISABLE_TEST_EXECUTION) if $(.STATUS_NOT_0) echo Skipping test execution due to testing.execute=off - exit 0 + exit $(.EXIT_SUCCESS) $(.ENDIF) $(LAUNCHER) "$(>)" $(ARGS) "$(INPUT_FILES)" > "$(output-file)" 2>&1 $(.NULLIN) $(.SET_STATUS) @@ -598,6 +655,52 @@ actions quietly updated ignore piecemeal together RmTemps $(.RM) "$(>)" } +if [ os.name ] = VMS +{ + actions capture-output bind INPUT_FILES output-file + { + $(PATH_SETUP) + $(.SHELL_SET)status=$(DISABLE_TEST_EXECUTION) + if $(.STATUS_NOT_0) + $(.SAY) "Skipping test execution due to testing.execute=off" + exit "$(.EXIT_SUCCESS)" + $(.ENDIF) + !! Execute twice - first for status, second for output + set noon + pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) 2>NL: >NL: + $(.SET_STATUS) + pipe $(LAUNCHER) $(>:W) $(ARGS) $(INPUT_FILES:W) | type sys$input /out=$(output-file:W) + set on + !! Harmonize VMS success status with POSIX + if $(.SUCCESS) + $(.SHELL_SET)status="0" + $(.ENDIF) + $(.RUN_OUTPUT_NL) | append /new sys$input $(output-file:W) + $(.SAY) "EXIT STATUS: $(.STATUS)" | append /new sys$input $(output-file:W) + if $(.STATUS_0) + $(.CP) $(output-file:W) $(<:W) + $(.ENDIF) + $(.SHELL_SET)verbose=$(.VERBOSE_TEST) + if $(.STATUS_NOT_0) + $(.SHELL_SET)verbose=1 + $(.ENDIF) + if $(.VERBOSE) + $(.SAY) "====== BEGIN OUTPUT ======" + $(.CATENATE) $(output-file:W) + $(.SAY) "====== END OUTPUT ======" + $(.ENDIF) + !! Harmonize VMS success status with POSIX on exit + if $(.STATUS_0) + $(.SHELL_SET)status="$(.EXIT_SUCCESS)" + $(.ENDIF) + exit "$(.STATUS)" + } + + actions quietly updated ignore piecemeal together RmTemps + { + $(.RM) $(>:WJ=;*,);* + } +} .MAKE_FILE = [ common.file-creation-command ] ; @@ -608,6 +711,12 @@ toolset.flags testing.unit-test ARGS ; rule unit-test ( target : source : properties * ) { run-path-setup $(target) : $(source) : $(properties) ; + + if ! 
[ feature.get-values testing.launcher : $(properties) ] + { + ## On VMS set default launcher to MCR + if [ os.name ] = VMS { LAUNCHER on $(target) = MCR ; } + } } @@ -617,6 +726,14 @@ actions unit-test $(LAUNCHER) "$(>)" $(ARGS) && $(.MAKE_FILE) "$(<)" } +if [ os.name ] = VMS +{ + actions unit-test + { + $(PATH_SETUP) + pipe $(LAUNCHER) $(>:W) $(ARGS) && $(.MAKE_FILE) $(<:W) + } +} IMPORT $(__name__) : compile compile-fail run run-fail link link-fail : : compile compile-fail run run-fail link link-fail ; @@ -673,3 +790,15 @@ actions time echo user: $(USER_TIME_SECONDS) > "$(<)" echo system: $(SYSTEM_TIME_SECONDS) >> "$(<)" } + +if [ os.name ] = VMS +{ + actions time + { + WRITE SYS$OUTPUT "user: ", "$(USER_TIME)" + WRITE SYS$OUTPUT "system: ", "(SYSTEM_TIME)" + + PIPE WRITE SYS$OUTPUT "user: ", "$(USER_TIME_SECONDS)" | TYPE SYS$INPUT /OUT=$(<:W) + PIPE WRITE SYS$OUTPUT "system: ", "$(SYSTEM_TIME_SECONDS)" | APPEND /NEW SYS$INPUT $(<:W) + } +} diff --git a/src/tools/testing.py b/src/tools/testing.py index a3b3f0117..868905a05 100644 --- a/src/tools/testing.py +++ b/src/tools/testing.py @@ -45,7 +45,7 @@ import b2.build_system as build_system from b2.manager import get_manager -from b2.util import stem, bjam_signature +from b2.util import stem, bjam_signature, is_iterable_typed from b2.util.sequence import unique import bjam @@ -88,7 +88,10 @@ __all_tests = [] # Helper rule. Create a test target, using basename of first source if no target # name is explicitly passed. Remembers the created target in a global variable. def make_test(target_type, sources, requirements, target_name=None): - + assert isinstance(target_type, basestring) + assert is_iterable_typed(sources, basestring) + assert is_iterable_typed(requirements, basestring) + assert isinstance(target_type, basestring) or target_type is None if not target_name: target_name = stem(os.path.basename(sources[0])) @@ -151,7 +154,7 @@ def handle_input_files(input_files): @bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], ["requirements", "*"], ["target_name", "?"], - ["default_build", "*"])) + ["default_build", "*"])) def run(sources, args, input_files, requirements, target_name=None, default_build=[]): if args: requirements.append("" + " ".join(args)) @@ -160,7 +163,7 @@ def run(sources, args, input_files, requirements, target_name=None, default_buil @bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], ["requirements", "*"], ["target_name", "?"], - ["default_build", "*"])) + ["default_build", "*"])) def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]): if args: requirements.append("" + " ".join(args)) @@ -189,7 +192,8 @@ __ln1 = re.compile("/(tools|libs)/(.*)/(test|example)") __ln2 = re.compile("/(tools|libs)/(.*)$") __ln3 = re.compile("(/status$)") def get_library_name(path): - + assert isinstance(path, basestring) + path = path.replace("\\", "/") match1 = __ln1.match(path) match2 = __ln2.match(path) @@ -216,6 +220,7 @@ __out_xml = option.get("out-xml", False, True) # - relative location of all source from the project root. 
# def dump_test(target): + assert isinstance(target, targets.AbstractTarget) type = target.type() name = target.name() project = target.project() @@ -298,7 +303,11 @@ generators.register_composing("testing.time", [], ["TIME"]) # contained in testing-aux.jam, which we load into Jam module named 'testing' def run_path_setup(target, sources, ps): - + if __debug__: + from ..build.property_set import PropertySet + assert is_iterable_typed(target, basestring) or isinstance(target, basestring) + assert is_iterable_typed(sources, basestring) + assert isinstance(ps, PropertySet) # For testing, we need to make sure that all dynamic libraries needed by the # test are found. So, we collect all paths from dependency libraries (via # xdll-path property) and add whatever explicit dll-path user has specified. @@ -313,7 +322,12 @@ def run_path_setup(target, sources, ps): common.shared_library_path_variable(), dll_paths)) def capture_output_setup(target, sources, ps): - run_path_setup(target, sources, ps) + if __debug__: + from ..build.property_set import PropertySet + assert is_iterable_typed(target, basestring) + assert is_iterable_typed(sources, basestring) + assert isinstance(ps, PropertySet) + run_path_setup(target[0], sources, ps) if ps.get('preserve-test-targets') == ['off']: bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1") diff --git a/src/tools/types/register.jam b/src/tools/types/register.jam index 203992ca9..daedfb701 100644 --- a/src/tools/types/register.jam +++ b/src/tools/types/register.jam @@ -21,7 +21,7 @@ local rule type ( type : suffixes * : base-type ? : os * ) } .this-module's-file = [ modules.binding $(__name__) ] ; -.this-module's-dir = [ path.parent $(.this-module's-file) ] ; +.this-module's-dir = [ path.parent [ path.make $(.this-module's-file) ] ] ; .sibling-jamfiles = [ path.glob $(.this-module's-dir) : *.jam ] ; .sibling-modules = [ MATCH ^(.*)\.jam$ : $(.sibling-jamfiles) ] ; @@ -32,7 +32,7 @@ for m in $(.sibling-modules) m = types/$(m) ; # Inject the type rule into the new module - IMPORT $(__name__) : type : $(m) : type ; + IMPORT $(__name__) : type : $(m:B) : type ; import $(m) ; } diff --git a/src/tools/unix.py b/src/tools/unix.py index 34758f57b..681a87202 100644 --- a/src/tools/unix.py +++ b/src/tools/unix.py @@ -15,17 +15,17 @@ from b2.util.utility import * from b2.util import set, sequence class UnixLinkingGenerator (builtin.LinkingGenerator): - + def __init__ (self, id, composing, source_types, target_types, requirements): builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements) - + def run (self, project, name, prop_set, sources): result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources) if result: set_library_order (project.manager (), sources, prop_set, result [1]) - + return result - + def generated_targets (self, sources, prop_set, project, name): sources2 = [] libraries = [] @@ -35,34 +35,34 @@ class UnixLinkingGenerator (builtin.LinkingGenerator): else: sources2.append (l) - + sources = sources2 + order_libraries (libraries) - + return builtin.LinkingGenerator.generated_targets (self, sources, prop_set, project, name) class UnixArchiveGenerator (builtin.ArchiveGenerator): def __init__ (self, id, composing, source_types, target_types_and_names, requirements): builtin.ArchiveGenerator.__init__ (self, id, composing, source_types, target_types_and_names, requirements) - + def run (self, project, name, prop_set, sources): result = builtin.ArchiveGenerator.run(self, project, name, 
prop_set, sources) set_library_order(project.manager(), sources, prop_set, result) return result class UnixSearchedLibGenerator (builtin.SearchedLibGenerator): - + def __init__ (self): builtin.SearchedLibGenerator.__init__ (self) - + def optional_properties (self): return self.requirements () - + def run (self, project, name, prop_set, sources): result = SearchedLibGenerator.run (project, name, prop_set, sources) - + set_library_order (sources, prop_set, result) - + return result class UnixPrebuiltLibGenerator (generators.Generator): @@ -86,21 +86,21 @@ generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB ### # Declare generators -### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE +### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE ### : unix ] ; generators.register (UnixArchiveGenerator ('unix.archive', True, ['OBJ'], ['STATIC_LIB'], ['unix'])) -### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB +### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB ### : unix ] ; -### -### generators.register [ new UnixSearchedLibGenerator +### +### generators.register [ new UnixSearchedLibGenerator ### unix.SearchedLibGenerator : : SEARCHED_LIB : unix ] ; -### -### +### +### ### # The derived toolset must specify their own actions. ### actions link { ### } -### +### ### actions link.dll { ### } @@ -110,9 +110,9 @@ def unix_archive (manager, targets, sources, properties): # FIXME: restore? #action.register ('unix.archive', unix_archive, ['']) -### actions searched-lib-generator { +### actions searched-lib-generator { ### } -### +### ### actions prebuilt { ### } @@ -141,10 +141,10 @@ def set_library_order (manager, sources, prop_set, result): for l in result: if l.type () and type.is_derived (l.type (), 'LIB'): created_libraries.append (l) - + created_libraries = set.difference (created_libraries, used_libraries) set_library_order_aux (created_libraries, used_libraries) def order_libraries (libraries): return __order.order (libraries) - + diff --git a/src/tools/vmsdecc.jam b/src/tools/vmsdecc.jam new file mode 100644 index 000000000..9c159f288 --- /dev/null +++ b/src/tools/vmsdecc.jam @@ -0,0 +1,578 @@ +# Copyright (c) 2015 Artur Shepilko +# +# Use, modification and distribution is subject to the Boost Software +# License Version 1.0. (See accompanying file LICENSE_1_0.txt or +# http://www.boost.org/LICENSE_1_0.txt) + +# Implements OpenVMS-based HP DECC/C++ toolset. +# Relies on POSIX-style path handling bjam/Boost.Build implementation for VMS. 
+ +import "class" : new ; +import property ; +import generators ; +import os ; +import toolset : flags ; +import feature ; +import type ; +import common ; +import unix ; +import path ; + + +if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] +{ + .debug-configuration = true ; +} + +feature.extend toolset : vmsdecc ; + +toolset.inherit-generators vmsdecc : unix : unix.link unix.link.dll ; +toolset.inherit-flags vmsdecc : unix ; +toolset.inherit-rules vmsdecc : unix ; + +generators.override vmsdecc.archive-generator : builtin.archive-generator ; +generators.override vmsdecc.prebuilt : builtin.prebuilt ; +generators.override vmsdecc.searched-lib-generator : searched-lib-generator ; + +type.set-generated-target-suffix EXE : vmsdecc vms : exe ; +type.set-generated-target-suffix OBJ : vmsdecc vms : obj ; +type.set-generated-target-suffix PREPROCESSED_C : vmsdecc vms : i ; +type.set-generated-target-suffix PREPROCESSED_CPP : vmsdecc vms : ixx ; +type.set-generated-target-suffix STATIC_LIB : vmsdecc vms : olb ; ## xxx.olb + +type.register-suffixes exe : SHARED_LIB ; +type.set-generated-target-prefix SHARED_LIB : vmsdecc vms : shr ; ## shrxxx.exe +type.set-generated-target-suffix SHARED_LIB : vmsdecc vms : exe ; ## shrxxx.exe + +.OBJ = .obj ; ## suffix +.nl = " +" ; + +rule init ( version ? : command * : options * ) +{ + local argv = [ modules.peek : ARGV ] ; + + local condition = [ + common.check-init-parameters vmsdecc : version $(version) ] ; + + # CC and CXX are CLI commands, so no need to search for the executables + command = CXX ; + toolset.flags vmsdecc .CXX $(condition) : CXX ; + common.handle-options vmsdecc : $(condition) : $(command) : $(options) ; + + local command_c = $(command[1--2]) $(command[-1]:B=CC) ; + toolset.flags vmsdecc .CC $(condition) : $(command_c) ; + + local linker = [ feature.get-values : $(options) ] ; + linker ?= CXXLINK ; + toolset.flags vmsdecc.link .LD $(condition) : $(linker) ; + if $(.debug-configuration) + { + ECHO notice: using linker :: $(condition) :: $(linker[1]) ; + } + + local archiver = LIB ; + toolset.flags vmsdecc.archive .AR $(condition) : $(archiver) ; + + local b2 = $(argv[1]) ; + toolset.flags vmsdecc .B2 $(condition) : $(b2) ; +} + +# Declare generators +generators.register-c-compiler vmsdecc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : vmsdecc ; +generators.register-c-compiler vmsdecc.compile.c.preprocess : C : PREPROCESSED_C : vmsdecc ; +generators.register-c-compiler vmsdecc.compile.c : C : OBJ : vmsdecc ; +generators.register-c-compiler vmsdecc.compile.c++ : CPP : OBJ : vmsdecc ; + +# Declare flags and actions for compilation +flags vmsdecc.compile OPTIONS on : /DEBUG ; +flags vmsdecc.compile OPTIONS on : /DEBUG ; ## needs PCA link options +flags vmsdecc.compile OPTIONS off : /NOOPT ; +flags vmsdecc.compile OPTIONS speed : /OPT=INLINE=SPEED/OPT=NOINLINE ; +flags vmsdecc.compile OPTIONS space : /OPT=INLINE=SIZE/OPT=NOINLINE ; +flags vmsdecc.compile OPTIONS off : /NOWARN; +flags vmsdecc.compile OPTIONS on : /WARN ; +flags vmsdecc.compile OPTIONS all : /WARN=ENABLE=ALL ; + +flags vmsdecc.compile.c++ OPTIONS off : /OPT=NOINLINE ; + +flags vmsdecc OPTIONS 32 : /POINTER=32 ; +flags vmsdecc OPTIONS 64 : /POINTER=64 ; ## /POINTER=64=ARGV argv-64 + +flags vmsdecc.compile OPTIONS ; +flags vmsdecc.compile.c++ OPTIONS ; +flags vmsdecc.compile DEFINES ; +flags vmsdecc.compile UNDEFS ; +flags vmsdecc.compile INCLUDES ; +flags vmsdecc.compile.c++ TEMPLATE_DEPTH ; + +feature.feature cxx-repository : : free path ; #order-sensitive ; +flags 
vmsdecc CXX-REPOS ; + + +local rule get-includes ( sources * : includes * ) +{ + local result ; + + ## Expect POSIX-style path, quote in double-quotes + for local d in $(sources:D) $(includes) + { + if $(d) + { + local QUOTE = \" ; + local SEP = / ; + + local enquote = false ; + local addsep = false ; + + s = [ SPLIT_BY_CHARACTERS $(d) : $(QUOTE) ] ; + + if $(s) = $(d) { enquote = true ; } + if [ SPLIT_BY_CHARACTERS $(s) : $(SEP) ] = $(s) { addsep = true ; } + + if $(addsep) + { + d = $(s)$(SEP) ; + enquote = true ; + } + + if $(enquote) + { + d = $(QUOTE)$(d)$(QUOTE) ; + } + + if ! $(d) in $(result) + { + result += $(d) ; + } + } + } + + return $(result) ; +} + +CXX-REPO-NAME = cxx_repository ; + +local rule get-target-cxx-repo ( target ) +{ + return [ path.join $(target) $(CXX-REPO-NAME) ] ; +} + +rule compile.c++ ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(SOURCE-INCLUDES) ] ; + DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ; + + DEFINES on $(targets) = [ on $(targets) return "__USE_STD_IOSTREAM" $(DEFINES) ] ; + + INCLUDES on $(targets) = [ on $(targets) get-includes $(sources) : $(INCLUDES) ] ; + + TARGET-CXX-REPO on $(targets) = [ on $(targets[1]) get-target-cxx-repo $(LOCATE) ] ; + CXX-REPOS on $(targets) = [ on $(targets) return $(TARGET-CXX-REPO) $(CXX-REPOS) ] ; +} + + +rule compile.c ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(SOURCE-INCLUDES) ] ; + + INCLUDES on $(targets) = [ on $(targets) get-includes $(sources) : $(INCLUDES) ] ; +} + +actions compile.c +{ + $(.CC) $(OPTIONS) /DEF=("$(DEFINES:J=",")") /UNDEF=("$(UNDEFS:J=",")") /INC=($(INCLUDES:J=,)) /OBJ=$(<:W) $(>:W) +} + +actions compile.c++ +{ + $(.CXX) $(OPTIONS) /DEF=("$(DEFINES:J=",")") /UNDEF=("$(UNDEFS:J=",")") /INC=($(INCLUDES:J=,)) /REPO=($(CXX-REPOS:WJ=,)) /OBJ=$(<:W) $(>:W) +} + + + +# Custom linking generator to separate dependency libraries and optfiles from +# the list of sources. The objfiles, libraries, and optfiles are then referenced +# via properties. This allows separate qualification of object-files and libraries +# on linker command line. +# +class vmsdecc-linking-generator : linking-generator +{ + rule run ( project name ? : property-set : sources + ) + { + local result = [ linking-generator.run $(project) $(name) : $(property-set) + : $(sources) ] ; + + return $(result) ; + } + + rule generated-targets ( sources + : property-set : project name ? ) + { + local sources2 ; # Sources to pass to inherited rule. + local properties2 ; # Properties to pass to inherited rule. + local objfiles ; # Object files. + local libraries ; # Library sources. 
+ + properties2 = [ $(property-set).raw ] ; + + for local s in $(sources) + { + if [ type.is-derived [ $(s).type ] OBJ ] + { + objfiles += $(s) ; + properties2 += $(s) ; + } + else if [ type.is-derived [ $(s).type ] STATIC_LIB ] + { + libraries += $(s) ; + properties2 += $(s) ; + } + else if [ type.is-derived [ $(s).type ] SHARED_LIB ] + { + libraries += $(s) ; + properties2 += $(s) ; + } + } + + + return [ linking-generator.generated-targets $(sources) + : [ property-set.create $(properties2) ] : $(project) $(name) ] ; + } +} + + +generators.register [ new vmsdecc-linking-generator vmsdecc.link : + OBJ SEARCHED_LIB STATIC_LIB SHARED_LIB : EXE : vmsdecc ] ; + +generators.register [ new vmsdecc-linking-generator vmsdecc.link.dll : + OBJ SEARCHED_LIB STATIC_LIB SHARED_LIB : SHARED_LIB : vmsdecc ] ; + + + +# Declare flags and actions for linking +flags vmsdecc.link OPTIONS on : /DEBUG ; +# Strip the binary when no debugging is needed +flags vmsdecc.link OPTIONS off : /NODEBUG ; +flags vmsdecc.link OPTIONS on : /DEBUG ; ## need "DEFINE LIB$DEBUG PCA$COLLECTOR" +flags vmsdecc.link OPTIONS ; +flags vmsdecc.link LINKPATH ; +flags vmsdecc.link FINDLIBS-ST ; +flags vmsdecc.link FINDLIBS-SA ; +flags vmsdecc.link LIBRARIES ; +flags vmsdecc.link LINK-RUNTIME static : static ; +flags vmsdecc.link LINK-RUNTIME shared : dynamic ; +flags vmsdecc.link RPATH ; +flags vmsdecc.link FINDLIBS-SA ; + +feature.feature "link-objfile" : : free dependency path incidental ; +flags vmsdecc.link LINK-OBJS ; + +feature.feature "link-libmodule" : : free dependency incidental ; +flags vmsdecc.link LINK-LIBMODULES ; + +feature.feature "link-staticlib" : : free dependency path incidental ; +flags vmsdecc.link LINK-LIBS ; + +feature.feature "link-sharedlib" : : free dependency path incidental ; +flags vmsdecc.link LINK-SHAREDLIBS ; + +feature.feature "link-optfile" : : free dependency path incidental ; +flags vmsdecc.link LINK-OPTS ; + + +local rule export-target-var-contents ( var-name : values * ) +{ + local result ; + local nl = " +" ; + local locate ; + + if $(var-name) + { + result += + "$(nl)$(var-name) =" ; + for local f in $(values) + { + locate = [ on $(f) return $(LOCATE) ] ; + result += + "$(nl)\"$(f:TG=:R=$(locate))\"" ; + } + result += "$(nl) ;" ; + } + + return $(result) ; +} + +# VMS linker usually expects an explicit object module that contains main(). +# Yet on *NIX, the main module can be automatically resolved from a library -- +# this may arguably be convenient with dynamic linking, and is also used with +# Boost.Test. +# To handle such cases on VMS, one needs first to locate the library module +# containing main(), then include it in sources for the link command. +# GLOB_ARCHIVE built-in can locate the module name (globbing by symbol MAIN). +# To be able to use its result during jam-parsing stage, we need to execute it +# from a separate jam-file that produces a pre-defined option file for link. +# + +actions write-jam-file-contents +{ + SET FILE /VER=1 @($(<:W):E= $(>) ) +} + + +local rule mainmod-link-opt.generate ( jam-file : opt-file : objs * : libs * : sharedlibs * ) +{ + local nl = " +" ; + local $ = $ ; + local @ = @ ; + + if $(jam-file) && $(opt-file) + { + local .contents on $(jam-file) = + "# This file was auto-generated by $(__name__)." 
; + + .contents on $(jam-file) += + "$(nl)OPT-FILE = $(opt-file) ;" ; + + .contents on $(jam-file) += [ on $(jam-file) + export-target-var-contents "OBJS" : $(objs) ] ; + + .contents on $(jam-file) += [ on $(jam-file) + export-target-var-contents "LIBS" : $(libs) ] ; + + .contents on $(jam-file) += [ on $(jam-file) + export-target-var-contents "SHAREDLIBS" : $(sharedlibs) ] ; + + .contents on $(jam-file) += + "$(nl).nl = \"$(nl)\" ;" + ; + .contents on $(jam-file) += + "$(nl)local rule get-main-members ( libs * : symbol-main ? )" + "$(nl){" + "$(nl) local result ;" + "$(nl) symbol-main ?= \"MAIN\" ;" + "$(nl) for local libfile in $($)(libs)" + "$(nl) {" + "$(nl) local main = [ GLOB_ARCHIVE $($)(libfile) : : : $($)(symbol-main) ] ;" + "$(nl) if $($)(main)" + "$(nl) {" + "$(nl) result += $($)(main) ;" + "$(nl) }" + "$(nl) }" + "$(nl) return $($)(result) ;" + "$(nl)}" + ; + .contents on $(jam-file) += + "$(nl)local rule get-libmods ( members * )" + "$(nl){" + "$(nl) local result ;" + "$(nl) for local m in $($)(members)" + "$(nl) {" + "$(nl) local lib = $($)(m:WDBS) ;" + "$(nl) local mem = $($)(m:M) ;" + "$(nl) if $($)(mem)" + "$(nl) {" + "$(nl) local mod = [ SPLIT_BY_CHARACTERS $($)(mem) : \"()\" ] ;" + "$(nl) result += $($)(lib)/INC=($($)(mod:B))/LIB ;" + "$(nl) }" + "$(nl) }" + "$(nl) return $($)(result) ;" + "$(nl)}" + ; + .contents on $(jam-file) += + "$(nl)rule mainmod-link-opt ( opt-file : libs * : objs * )" + "$(nl){" + "$(nl) local main-members = [ on $($)(opt-file[1]) get-main-members $($)(libs) ] ;" + "$(nl) LIBMODULES on $($)(opt-file[1]) = [ on $($)(opt-file[1]) get-libmods $($)(main-members[1]) ] ;" + "$(nl)}" + ; + .contents on $(jam-file) += + "$(nl)actions mainmod-link-opt bind OBJS LIBMODULES" + "$(nl){" + "$(nl) SET FILE /VER=1 $(@)($($)(<:W):E= $($)(LIBMODULES:J=,-$($)(.nl))-$($)(.nl) )" + "$(nl)}" + ; + .contents on $(jam-file) += + "$(nl)local rule make" + "$(nl){" + "$(nl) if $($)(OPT-FILE)" + "$(nl) {" + "$(nl) DEPENDS all : $($)(OPT-FILE) ;" + "$(nl) DEPENDS $($)(OPT-FILE) : $($)(LIBS) $($)(OBJS) ;" + "$(nl) mainmod-link-opt $($)(OPT-FILE) : $($)(LIBS) : $($)(OBJS) ;" + "$(nl) }" + "$(nl)}" + "$(nl)make all ;" + ; + + write-jam-file-contents $(jam-file) : [ on $(jam-file) return $(.contents) ] ; + + } +} + + +rule link ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-OBJS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-LIBS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-SHAREDLIBS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-OPTS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LIBRARIES) ] ; + + + for local s in $(sources) + { + local r = [ on $(s) return $(TARGET-CXX-REPO) ] ; + + if ! 
$(r) in [ on $(targets[1]) return $(CXX-REPOS) ] + { + CXX-REPOS on $(targets[1]) += $(r) ; + } + } + + local locate = [ on $(targets[1]) return $(LOCATE) ] ; + LINK-MAINMOD-OPT on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.opt) ; + LINK-MAINMOD-JAM on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.jam) ; + #on $(targets[1]) TEMPORARY $(LINK-MAINMOD-JAM) ; + + DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-OPT) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-JAM) ] ; + on $(targets[1]) DEPENDS $(LINK-MAINMOD-OPT) : $(LINK-MAINMOD-JAM) ; + + on $(targets[1]) mainmod-link-opt.generate $(LINK-MAINMOD-JAM) + : $(LINK-MAINMOD-OPT) : $(LINK-OBJS) : $(LINK-LIBS) $(LIBRARIES) : $(LINK-SHAREDLIBS) ; + + +} + +actions link bind LINK-OBJS LINK-MAINMOD-JAM LINK-MAINMOD-OPT LINK-LIBS LIBRARIES LINK-SHAREDLIBS LINK-OPTS CXX-REPOS +{ + CXX_REPOS = "" +"$(CXX-REPOS:WJ=,)" + IF (CXX_REPOS .EQS. "") THEN CXX_REPOS = "NL:" + DEF /NOLOG REPOS 'CXX_REPOS' + SET FILE /VER=1 @($(<:WS=$INPUT.opt):E= $(LINK-OBJS:WJ=,-$(.nl))-$(.nl) ,$(LINK-LIBS:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LIBRARIES:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LINK-SHAREDLIBS:WJ=/SHARE,-$(.nl))/SHARE-$(.nl) ) + MC $(.B2) -f $(LINK-MAINMOD-JAM:W) + $(.LD) $(OPTIONS) /REPO=(REPOS:) /EXE=$(<:W) $(LINK-MAINMOD-OPT:W)/OPT, $(<:WS=$INPUT.opt)/OPT ,$(LINK-OPTS:WJ=/OPT,)/OPT +} + +# Slight mods for dlls +rule link.dll ( targets * : sources * : properties * ) +{ + DEPENDS $(targets) : [ on $(targets) return $(CXX-REPOS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-OBJS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-LIBS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-SHAREDLIBS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-OPTS) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LIBRARIES) ] ; + + for local s in $(sources) + { + local r = [ on $(s) return $(TARGET-CXX-REPO) ] ; + + if ! $(r) in [ on $(targets[1]) return $(CXX-REPOS) ] + { + CXX-REPOS on $(targets[1]) += $(r) ; + } + } + + + local locate = [ on $(targets[1]) return $(LOCATE) ] ; + LINK-MAINMOD-OPT on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.opt) ; + LINK-MAINMOD-JAM on $(targets[1]) = $(targets[1]:TG=:R=$(locate):S=$MAINMOD.jam) ; + #on $(targets[1]) TEMPORARY $(LINK-MAINMOD-JAM) ; + + DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-OPT) ] ; + DEPENDS $(targets) : [ on $(targets) return $(LINK-MAINMOD-JAM) ] ; + on $(targets[1]) DEPENDS $(LINK-MAINMOD-OPT) : $(LINK-MAINMOD-JAM) ; + + on $(targets[1]) mainmod-link-opt.generate $(LINK-MAINMOD-JAM) + : $(LINK-MAINMOD-OPT) : $(LINK-OBJS) : $(LINK-LIBS) $(LIBRARIES) : $(LINK-SHAREDLIBS) ; + +} + +actions link.dll bind LINK-OBJS LINK-MAINMOD-JAM LINK-MAINMOD-OPT LINK-LIB LINK-LIBS LIBRARIES LINK-SHAREDLIBS LINK-OPTS CXX-REPOS +{ + CXX_REPOS = "" +"$(CXX-REPOS:WJ=,)" + IF (CXX_REPOS .EQS. 
"") THEN CXX_REPOS = "NL:" + DEF /NOLOG REPOS 'CXX_REPOS' + SET FILE /VER=1 @($(<:WS=$INPUT.opt):E= $(LINK-OBJS:WJ=,-$(.nl))-$(.nl) ,$(LINK-LIBS:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LIBRARIES:WJ=/LIB,-$(.nl))/LIB-$(.nl) ,$(LINK-SHAREDLIBS:WJ=/SHARE,-$(.nl))/SHARE-$(.nl) ) + MC $(.B2) -f $(LINK-MAINMOD-JAM:W) + $(.LD) $(OPTIONS) /REPO=(REPOS:) /SHARE=$(<:W) $(LINK-MAINMOD-OPT:W)/OPT, $(<:WS=$INPUT.opt)/OPT ,$(LINK-OPTS:WJ=/OPT,)/OPT +} + + + +flags vmsdecc.archive AROPTIONS ; + + +local rule vms-join-wildcard-name ( path * : name ) +{ + local files ; + + if $(name) + { + for local d in $(path) + { + files += $(d)$(name) ; + } + + files ?= $(name) ; + + } + + return $(files) ; +} + + +rule archive ( targets + : sources * : properties * ) +{ + local clean.a = $(targets[1])(clean) ; + TEMPORARY $(clean.a) ; + NOCARE $(clean.a) ; + LOCATE on $(clean.a) = [ on $(targets[1]) return $(LOCATE) ] ; + DEPENDS $(clean.a) : $(sources) ; + DEPENDS $(targets) : $(clean.a) ; + common.RmTemps $(clean.a) : $(targets) ; + + + #CXX-REPOS on $(targets[1]) = null ; ## reset + + for local s in $(sources) + { + local r = [ on $(s) return $(TARGET-CXX-REPO) ] ; + + if ! $(r) in [ on $(targets[1]) return $(CXX-REPOS) ] + { + CXX-REPOS on $(targets[1]) += $(r) ; + } + } + + if [ on $(targets[1]) return $(CXX-REPOS) ] + { + CXX-REPO-OBJS on $(targets[1]) = [ on $(targets[1]) return [ vms-join-wildcard-name $(CXX-REPOS:W) : *$(.OBJ) ] ] ; + + #DEPENDS $(targets) : [ on $(targets[1]) return $(CXX-REPO-OBJS) ] ; + } +} + +# Declare action for creating static libraries +actions piecemeal archive +{ + HAVE_REPO_OBJS = "F" + IF ("" +"$(CXX-REPO-OBJS[1])" .NES. "") + THEN + IF ( "" +F$SEARCH("$(CXX-REPO-OBJS[1])") .NES. "") + THEN + HAVE_REPO_OBJS = "T" + ENDIF + ENDIF + $(.AR) /CREATE /REPL $(AROPTIONS) $(<:W) $(>:WJ=,) + IF (HAVE_REPO_OBJS) + THEN + $(.AR) /REPL $(AROPTIONS) $(<:W) $(CXX-REPO-OBJS:J=,) + PIPE DEL /NOLOG /NOCONF $(CXX-REPO-OBJS:J=;*,);* 2>NL: >NL: + ENDIF +} + diff --git a/src/tools/xlcpp.jam b/src/tools/xlcpp.jam new file mode 100644 index 000000000..0d6e80cf5 --- /dev/null +++ b/src/tools/xlcpp.jam @@ -0,0 +1,151 @@ +# Copyright Vladimir Prus 2004. +# Copyright Toon Knapen 2004. +# Copyright Catherine Morton 2015. +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt +# or copy at http://www.boost.org/LICENSE_1_0.txt) + +# +# Boost.Build V2 toolset for the IBM XL C++ compiler +# + +import toolset : flags ; +import feature ; +import common ; +import generators ; +import os ; + +feature.extend toolset : xlcpp ; +toolset.inherit xlcpp : unix ; +generators.override xlcpp.prebuilt : builtin.prebuilt ; +generators.override xlcpp.searched-lib-generator : searched-lib-generator ; + +# Configure the xlcpp toolset +rule init ( version ? 
: command * : options * ) +{ + local condition = [ + common.check-init-parameters xlcpp : version $(version) ] ; + + command = [ common.get-invocation-command xlcpp : xlC + : $(command) : "/usr/xlcpp/bin/xlC" ] ; + + common.handle-options xlcpp : $(condition) : $(command) : $(options) ; +} + +# Declare generators +generators.register-c-compiler xlcpp.compile.c : C : OBJ : <toolset>xlcpp ; +generators.register-c-compiler xlcpp.compile.c++ : CPP : OBJ : <toolset>xlcpp ; + +# Allow C++ style comments in C files +flags xlcpp CFLAGS : -qnoxlcompatmacros ; + +# Declare flags +flags xlcpp CFLAGS <optimization>off : -qNOOPTimize ; +flags xlcpp CFLAGS <optimization>speed : ; +flags xlcpp CFLAGS <optimization>space : -O2 -qcompact ; + +# Discretionary inlining (not recommended) +flags xlcpp CFLAGS <inlining>off : -qnoinline ; +flags xlcpp CFLAGS <inlining>on : -qinline ; +#flags xlcpp CFLAGS <inlining>full : -qinline ; +flags xlcpp CFLAGS <inlining>full : ; + +# Exception handling +flags xlcpp C++FLAGS <exception-handling>off : -qnoeh ; +flags xlcpp C++FLAGS <exception-handling>on : -qeh ; + +# Run-time Type Identification +flags xlcpp C++FLAGS <rtti>off : -qnortti ; +flags xlcpp C++FLAGS <rtti>on : -qrtti ; + +# Enable 64-bit memory addressing model +flags xlcpp CFLAGS <address-model>64 : -q64 ; +flags xlcpp LINKFLAGS <address-model>64 : -q64 ; +flags xlcpp ARFLAGS <target-os>aix/<address-model>64 : -X 64 ; + +# Use absolute path when generating debug information +flags xlcpp CFLAGS <debug-symbols>on : -g -qfullpath ; +flags xlcpp LINKFLAGS <debug-symbols>on : -g -qfullpath ; +flags xlcpp LINKFLAGS <debug-symbols>off : -s ; + +if [ os.name ] = AIX +{ + flags xlcpp.compile C++FLAGS : -qfuncsect ; + + # The -bnoipath strips the prepending (relative) path of libraries from + # the loader section in the target library or executable. Hence, during + # load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded + # -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without + # this option, the prepending (relative) path + library name is + # hard-coded in the loader section, causing *only* this path to be + # searched during load-time. Note that the AIX linker does not have an + # -soname equivalent, this is as close as it gets. + # + # The above options are definitely for AIX 5.x, and most likely also for + # AIX 4.x and AIX 6.x. For details about the AIX linker see: + # http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf + # + flags xlcpp.link LINKFLAGS <link>shared : -bnoipath ; + + # Run-time linking + flags xlcpp.link EXE-LINKFLAGS <link>shared : -brtl ; +} +else +{ + # Linux PPC + flags xlcpp.compile CFLAGS <link>shared : -qpic=large ; + flags xlcpp FINDLIBS : rt ; +} + +# Profiling +flags xlcpp CFLAGS <profiling>on : -pg ; +flags xlcpp LINKFLAGS <profiling>on : -pg ; + +flags xlcpp.compile OPTIONS <cflags> ; +flags xlcpp.compile.c++ OPTIONS <cxxflags> ; +flags xlcpp DEFINES <define> ; +flags xlcpp UNDEFS <undef> ; +flags xlcpp HDRS <include> ; +flags xlcpp STDHDRS <sysinclude> ; +flags xlcpp.link OPTIONS <linkflags> ; +flags xlcpp ARFLAGS <arflags> ; + +flags xlcpp LIBPATH <library-path> ; +flags xlcpp NEEDLIBS <library-file> ; +flags xlcpp FINDLIBS <find-shared-library> ; +flags xlcpp FINDLIBS <find-static-library> ; + +# Select the compiler name according to the threading model. 
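# For illustration only (hypothetical target and invocation, not defined in this
# module): the conditional "flags" declarations above are what turn b2 properties
# into xlC options at build time. Given a Jamfile target such as
#
#   exe hello : hello.cpp ;
#
# a request like "b2 toolset=xlcpp optimization=space rtti=on address-model=64"
# would be expected to compile with -O2 -qcompact, -qrtti and -q64, while the
# <threading> flags below choose which compiler driver is invoked.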
+flags xlcpp VA_C_COMPILER <threading>single : xlc ; +flags xlcpp VA_C_COMPILER <threading>multi : xlc ; +flags xlcpp VA_CXX_COMPILER <threading>single : xlC ; +flags xlcpp VA_CXX_COMPILER <threading>multi : xlC ; + +SPACE = " " ; + +flags xlcpp.link.dll HAVE_SONAME <target-os>linux : "" ; + +actions xlcpp.link bind NEEDLIBS +{ + $(VA_CXX_COMPILER) $(EXE-LINKFLAGS) $(LINKFLAGS) -o "$(<[1])" -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) +} + +actions xlcpp.link.dll bind NEEDLIBS +{ + xlC -G $(LINKFLAGS) -o "$(<[1])" $(HAVE_SONAME)-Wl,-soname$(SPACE)-Wl,$(<[-1]:D=) -L$(LIBPATH) -L$(STDLIBPATH) "$(>)" "$(NEEDLIBS)" "$(NEEDLIBS)" -l$(FINDLIBS) $(OPTIONS) $(USER_OPTIONS) +} + +actions xlcpp.compile.c +{ + $(VA_C_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" +} + +actions xlcpp.compile.c++ +{ + $(VA_CXX_COMPILER) -c $(OPTIONS) $(USER_OPTIONS) -I$(BOOST_ROOT) -U$(UNDEFS) -D$(DEFINES) $(CFLAGS) $(C++FLAGS) -I"$(HDRS)" -I"$(STDHDRS)" -o "$(<)" "$(>)" +} + +actions updated together piecemeal xlcpp.archive +{ + ar $(ARFLAGS) ru "$(<)" "$(>)" +} diff --git a/src/util/__init__.py b/src/util/__init__.py index e6a277af7..7c847cb57 100644 --- a/src/util/__init__.py +++ b/src/util/__init__.py @@ -6,14 +6,153 @@ import types from itertools import groupby +def safe_isinstance(value, types=None, class_names=None): + """To prevent circular imports, this extends isinstance() + by checking also if `value` has a particular class name (or inherits from a + particular class name). This check is safe in that an AttributeError is not + raised in case `value` doesn't have a __class__ attribute. + """ + # inspect is being imported here because I seriously doubt + # that this function will be used outside of the type + # checking below. + import inspect + result = False + if types is not None: + result = result or isinstance(value, types) + if class_names is not None and not result: + # this doesn't work with inheritance, but normally + # either the class will already be imported within the module, + # or the class doesn't have any subclasses. For example: PropertySet + if isinstance(class_names, basestring): + class_names = [class_names] + # this is the part that makes it "safe". + try: + base_names = [class_.__name__ for class_ in inspect.getmro(value.__class__)] + for name in class_names: + if name in base_names: + return True + except AttributeError: + pass + return result + + +def is_iterable_typed(values, type_): + return is_iterable(values) and all(isinstance(v, type_) for v in values) + + +def is_iterable(value): + """Returns whether value is iterable and not a string.""" + return not isinstance(value, basestring) and hasattr(value, '__iter__') + + +def is_iterable_or_none(value): + return is_iterable(value) or value is None + + +def is_single_value(value): + # some functions may specify a bjam signature + # that is a string type, but still allow a + # PropertySet to be passed in + return safe_isinstance(value, (basestring, type(None)), 'PropertySet') + + +if __debug__: + + from textwrap import dedent + message = dedent( + """The parameter "{}" was passed in a wrong type for the "{}()" function. 
+ Actual: + \ttype: {} + \tvalue: {} + Expected: + \t{} + """ + ) + + bjam_types = { + '*': is_iterable_or_none, + '+': is_iterable_or_none, + '?': is_single_value, + '': is_single_value, + } + + bjam_to_python = { + '*': 'iterable', + '+': 'iterable', + '?': 'single value', + '': 'single value', + } + + + def get_next_var(field): + it = iter(field) + var = it.next() + type_ = None + yield_var = False + while type_ not in bjam_types: + try: + # the first value has already + # been consumed outside of the loop + type_ = it.next() + except StopIteration: + # if there are no more values, then + # var still needs to be returned + yield_var = True + break + if type_ not in bjam_types: + # type_ is not a type and is + # another variable in the same field. + yield var, '' + # type_ is the next var + var = type_ + else: + # otherwise, type_ is a type for var + yield var, type_ + try: + # the next value should be a var + var = it.next() + except StopIteration: + # if not, then we're done with + # this field + break + if yield_var: + yield var, '' + + # Decorator that specifies bjam-side prototype for a Python function def bjam_signature(s): + if __debug__: + from inspect import getcallargs + def decorator(fn): + function_name = fn.__module__ + '.' + fn.__name__ + def wrapper(*args, **kwargs): + callargs = getcallargs(fn, *args, **kwargs) + for field in s: + for var, type_ in get_next_var(field): + try: + value = callargs[var] + except KeyError: + raise Exception( + 'Bjam Signature specifies a variable named "{}"\n' + 'but is not found within the python function signature\n' + 'for function {}()'.format(var, function_name) + ) + if not bjam_types[type_](value): + raise TypeError( + message.format(var, function_name, type(value), repr(value), + bjam_to_python[type_]) + ) + return fn(*args, **kwargs) + wrapper.__name__ = fn.__name__ + wrapper.bjam_signature = s + return wrapper + return decorator + else: + def decorator(f): + f.bjam_signature = s + return f - def wrap(f): - f.bjam_signature = s - return f - - return wrap + return decorator def metatarget(f): @@ -58,9 +197,9 @@ def qualify_jam_action(action_name, context_module): ix = action_name.find('.') if ix != -1 and action_name[:ix] == context_module: return context_module + '%' + action_name[ix+1:] - - return context_module + '%' + action_name - + + return context_module + '%' + action_name + def set_jam_action(name, *args): diff --git a/src/util/doc.jam b/src/util/doc.jam index 702cab4b5..733fa7c97 100644 --- a/src/util/doc.jam +++ b/src/util/doc.jam @@ -264,7 +264,7 @@ local rule print-help-top ( ) print.list-item "-a Rebuild everything" ; print.list-item "-n Don't execute the commands, only print them" ; print.list-item "-d+2 Show commands as they are executed" ; - print.list-item "-d0 Supress all informational messages" ; + print.list-item "-d0 Suppress all informational messages" ; print.list-item "-q Stop at first error" ; print.list-item "--reconfigure Rerun all configuration checks" ; print.list-item "--debug-configuration Diagnose configuration" ; diff --git a/src/util/indirect.py b/src/util/indirect.py index 78fa89946..01c2e77c2 100644 --- a/src/util/indirect.py +++ b/src/util/indirect.py @@ -1,6 +1,6 @@ # Status: minimally ported. This module is not supposed to be used much # with Boost.Build/Python. -# +# # Copyright 2003 Dave Abrahams # Copyright 2003 Vladimir Prus # Distributed under the Boost Software License, Version 1.0. 
diff --git a/src/util/logger.py b/src/util/logger.py index de6521290..8da0434ae 100644 --- a/src/util/logger.py +++ b/src/util/logger.py @@ -7,25 +7,25 @@ import sys class NullLogger: def __init__ (self): self.indent_ = '' - + def log (self, source_name, *args): if self.on () and self.interesting (source_name): self.do_log (self.indent_) for i in args: self.do_log (i) self.do_log ('\n') - + def increase_indent (self): if self.on (): self.indent_ += ' ' - + def decrease_indent (self): if self.on () and len (self.indent_) > 4: self.indent_ = self.indent_ [-4:] def do_log (self, *args): pass - + def interesting (self, source_name): return False @@ -35,10 +35,10 @@ class NullLogger: class TextLogger (NullLogger): def __init__ (self): NullLogger.__init__ (self) - + def do_log (self, arg): sys.stdout.write (str (arg)) - + def interesting (self, source_name): return True diff --git a/src/util/option.py b/src/util/option.py index 47d6abdff..120c2a32c 100644 --- a/src/util/option.py +++ b/src/util/option.py @@ -13,9 +13,9 @@ options = {} # Set a value for a named option, to be used when not overridden on the command # line. def set(name, value=None): - + global options - + options[name] = value def get(name, default_value=None, implied_value=None): diff --git a/src/util/order.py b/src/util/order.py index 4e67b3f1a..de990b734 100644 --- a/src/util/order.py +++ b/src/util/order.py @@ -9,26 +9,26 @@ class Order: The primary use case is the gcc toolset, which is sensitive to library order: if library 'a' uses symbols from library 'b', then 'a' must be present before 'b' on the linker's command line. - + This requirement can be lifted for gcc with GNU ld, but for gcc with Solaris LD (and for Solaris toolset as well), the order always matters. - + So, we need to store order requirements and then order libraries according to them. It it not possible to use dependency graph as order requirements. What we need is "use symbols" relationship while dependency graph provides "needs to be updated" relationship. - + For example:: lib a : a.cpp b; lib b ; - + For static linking, the 'a' library need not depend on 'b'. However, it still should come before 'b' on the command line. """ def __init__ (self): self.constraints_ = [] - + def add_pair (self, first, second): """ Adds the constraint that 'first' should precede 'second'. """ @@ -37,7 +37,7 @@ class Order: def order (self, objects): """ Given a list of objects, reorder them so that the constains specified by 'add_pair' are satisfied. - + The algorithm was adopted from an awk script by Nikita Youshchenko (yoush at cs dot msu dot su) """ @@ -46,11 +46,11 @@ class Order: # rather removing edges. result = [] - if not objects: + if not objects: return result constraints = self.__eliminate_unused_constraits (objects) - + # Find some library that nobody depends upon and add it to # the 'result' array. obj = None @@ -68,7 +68,7 @@ class Order: new_objects.append (obj) obj = None objects = objects [1:] - + if not obj: raise BaseException ("Circular order dependencies") @@ -82,7 +82,7 @@ class Order: # Add the remaining objects for further processing # on the next iteration objects = new_objects - + return result def __eliminate_unused_constraits (self, objects): @@ -96,9 +96,9 @@ class Order: result.append (c) return result - + def __has_no_dependents (self, obj, constraints): - """ Returns true if there's no constraint in 'constraints' where + """ Returns true if there's no constraint in 'constraints' where 'obj' comes second. 
""" failed = False @@ -111,7 +111,7 @@ class Order: constraints = constraints [1:] return not failed - + def __remove_satisfied (self, constraints, obj): result = [] for c in constraints: diff --git a/src/util/os.jam b/src/util/os.jam index 818b0b423..7a5ef08dc 100644 --- a/src/util/os.jam +++ b/src/util/os.jam @@ -77,6 +77,12 @@ if $(.name) = NT .shared-library-path-variable-HAIKU = LIBRARY_PATH ; +.shared-library-path-variable-VMS = PATH ; +.path-separator-VMS = "," ; +.expand-variable-prefix-VMS = '' ; +.expand-variable-suffix-VMS = ' ; +.executable-suffix-VMS = .exe ; + # Default constants .shared-library-path-variable = LD_LIBRARY_PATH ; .path-separator = ":" ; @@ -149,7 +155,18 @@ rule on-windows ( ) } -if ! [ on-windows ] +rule on-vms ( ) +{ + local result ; + if [ modules.peek : VMS ] + { + result = true ; + } + return $(result) ; +} + + +if ! [ on-windows ] && ! [ on-vms ] { .on-unix = 1 ; } diff --git a/src/util/path.jam b/src/util/path.jam index 545d83c84..166c79330 100644 --- a/src/util/path.jam +++ b/src/util/path.jam @@ -569,67 +569,72 @@ rule split-path-VMS ( native ) # rule make-VMS ( native ) { - if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ] - { - import errors ; - errors.error "Can't handle default-device absolute paths: " $(native) ; - } + ## Use POSIX-style path (keep previous code commented out - real magic!). + ## VMS CRTL supports POSIX path, JAM is retrofitted to pass it to VMS CRTL. - local parts = [ split-path-VMS $(native) ] ; - local device = $(parts[1]) ; - local dir = $(parts[2]) ; - local file = $(parts[3]) ; - local elems ; + local portable = [ make-UNIX $(native) ] ; - if $(device) - { - # - # rooted - # - elems = /$(device) ; - } - - if $(dir) = "[]" - { - # - # Special case: current directory - # - elems = $(elems) "." ; - } - else if $(dir) - { - dir = [ regex.replace $(dir) "\\[|\\]" "" ] ; - local dir_parts = [ regex.split $(dir) \\. ] ; - - if $(dir_parts[1]) = "" - { - # - # Relative path - # - dir_parts = $(dir_parts[2--1]) ; - } - - # - # replace "parent-directory" parts (- => ..) - # - dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ; - - elems = $(elems) $(dir_parts) ; - } - - if $(file) - { - if ! [ MATCH (\\.) : $(file) ] - { - # - # Always add "." to end of non-extension file. - # - file = $(file). ; - } - elems = $(elems) $(file) ; - } - - local portable = [ path.join $(elems) ] ; + #if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ] + #{ + # import errors ; + # errors.error "Can't handle default-device absolute paths: " $(native) ; + #} + # + #local parts = [ split-path-VMS $(native) ] ; + #local device = $(parts[1]) ; + #local dir = $(parts[2]) ; + #local file = $(parts[3]) ; + #local elems ; + # + #if $(device) + #{ + # # + # # rooted + # # + # elems = /$(device) ; + #} + # + #if $(dir) = "[]" + #{ + # # + # # Special case: current directory + # # + # elems = $(elems) "." ; + #} + #else if $(dir) + #{ + # dir = [ regex.replace $(dir) "\\[|\\]" "" ] ; + # local dir_parts = [ regex.split $(dir) \\. ] ; + # + # if $(dir_parts[1]) = "" + # { + # # + # # Relative path + # # + # dir_parts = $(dir_parts[2--1]) ; + # } + # + # # + # # replace "parent-directory" parts (- => ..) + # # + # dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ; + # + # elems = $(elems) $(dir_parts) ; + #} + # + #if $(file) + #{ + # if ! [ MATCH (\\.) : $(file) ] + # { + # # + # # Always add "." to end of non-extension file. + # # + # file = $(file). 
; + # } + # elems = $(elems) $(file) ; + #} + # + #portable = [ path.join $(elems) ] ; return $(portable) ; } @@ -642,67 +647,90 @@ rule make-VMS ( native ) # rule native-VMS ( path ) { - local device = "" ; - local dir = $(path) ; - local file = "" ; - local native ; - local split ; + ## Use POSIX-style path (keep previous code commented out - real magic!). + ## VMS CRTL supports POSIX path, JAM is retrofitted to pass it to VMS CRTL. + ## NOTE: While translation to VMS-style is implemented with $(:W) modifier, + ## Here we retain POSIX-style path, so it can be portably manipulated + ## in B2 rules, and only in actions it's translated with $(:W). - # - # Has device ? - # - if [ is-rooted $(dir) ] - { - split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ; - device = $(split[1]) ; - dir = $(split[2]) ; - } + local native = [ native-UNIX $(path) ] ; + #local device = "" ; + #local dir = $(path) ; + #local file = "" ; + #local split ; # - # Has file ? + ## + ## Has device ? + ## + #if [ is-rooted $(dir) ] + #{ + # split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ; + # device = $(split[1]) ; + # dir = $(split[2]) ; + #} # - # This is no exact science, just guess work: + ## + ## Has file ? + ## + ## This is no exact science, just guess work: + ## + ## If the last part of the current path spec includes some chars, followed by + ## a dot, optionally followed by more chars - then it is a file (keep your + ## fingers crossed). + ## + #split = [ regex.split $(dir) / ] ; + #local maybe_file = $(split[-1]) ; # - # If the last part of the current path spec includes some chars, followed by - # a dot, optionally followed by more chars - then it is a file (keep your - # fingers crossed). + #if [ MATCH ^([^.]+\\..*) : $(maybe_file) ] + #{ + # file = $(maybe_file) ; + # dir = [ sequence.join $(split[1--2]) : / ] ; + #} # - split = [ regex.split $(dir) / ] ; - local maybe_file = $(split[-1]) ; - - if [ MATCH ^([^.]+\\..*) : $(maybe_file) ] - { - file = $(maybe_file) ; - dir = [ sequence.join $(split[1--2]) : / ] ; - } - + ## + ## Has dir spec ? + ## + #if $(dir) = "." + #{ + # dir = "[]" ; + #} + #else if $(dir) + #{ + # dir = [ regex.replace $(dir) \\.\\. - ] ; + # dir = [ regex.replace $(dir) / . ] ; # - # Has dir spec ? + # if $(device) = "" + # { + # # + # # Relative directory + # # + # dir = "."$(dir) ; + # } + # dir = "["$(dir)"]" ; + #} # - if $(dir) = "." - { - dir = "[]" ; - } - else if $(dir) - { - dir = [ regex.replace $(dir) \\.\\. - ] ; - dir = [ regex.replace $(dir) / . ] ; - - if $(device) = "" - { - # - # Relative directory - # - dir = "."$(dir) ; - } - dir = "["$(dir)"]" ; - } - - native = [ sequence.join $(device) $(dir) $(file) ] ; + #native = [ sequence.join $(device) $(dir) $(file) ] ; return $(native) ; } + +if $(os) = VMS +{ + # Translates POSIX-style path to VMS-style path + # + # This results in actual VMS path, unlike 'native-VMS' rule which is meant + # to return POSIX-style in order to mask VMS specificity and help portability. 
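# For illustration only (hypothetical paths; the exact spelling of the result
# comes from the engine's $(:W) translation on VMS): a portable path such as
# "foo/bar/baz.cpp" would be expected to come back roughly as "[.foo.bar]baz.cpp",
# and a rooted "/disk$user/build/baz.cpp" as something like
# "DISK$USER:[BUILD]BAZ.CPP".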
+ + rule to-VMS ( path ) + { + return $(path:W) ; + } + + EXPORT $(__name__) : to-$(os) ; +} + # Remove one level of indirection IMPORT $(__name__) : make-$(os) native-$(os) : $(__name__) : make native ; EXPORT $(__name__) : make native ; @@ -847,64 +875,73 @@ rule __test__ ( ) modules.poke path : os : VMS ; - # - # Do not really need to poke os before these - # - assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ; - assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ; - assert.result "disk:" "" "" : split-path-VMS "disk:" ; - assert.result "disk:" "" "file" : split-path-VMS "disk:file" ; - assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ; - assert.result "" "[dir]" "" : split-path-VMS "[dir]" ; - assert.result "" "" "file" : split-path-VMS "file" ; - assert.result "" "" "" : split-path-VMS "" ; + ## On VMS use POSIX-style path (keep previous tests commented out). - # - # Special case: current directory - # - assert.result "" "[]" "" : split-path-VMS "[]" ; - assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ; - assert.result "" "[]" "file" : split-path-VMS "[]file" ; - assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ; + assert.result "foo/bar/giz" : make-VMS "foo/bar/giz" ; + assert.result "/sub1" : make-VMS "/sub1/." ; + assert.result "/sub1" : make-VMS "/sub1/sub2/.." ; + assert.result "sub1" : make-VMS "sub1/." ; + assert.result "sub1" : make-VMS "sub1/sub2/.." ; + assert.result "/foo/bar" : native-VMS "/foo/bar" ; + ## + ## Do not really need to poke os before these + ## + #assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ; + #assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ; + #assert.result "disk:" "" "" : split-path-VMS "disk:" ; + #assert.result "disk:" "" "file" : split-path-VMS "disk:file" ; + #assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ; + #assert.result "" "[dir]" "" : split-path-VMS "[dir]" ; + #assert.result "" "" "file" : split-path-VMS "file" ; + #assert.result "" "" "" : split-path-VMS "" ; # - # Make portable paths + ## + ## Special case: current directory + ## + #assert.result "" "[]" "" : split-path-VMS "[]" ; + #assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ; + #assert.result "" "[]" "file" : split-path-VMS "[]file" ; + #assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ; # - assert.result "/disk:" : make-VMS "disk:" ; - assert.result "foo/bar/giz" : make-VMS "[.foo.bar.giz]" ; - assert.result "foo" : make-VMS "[.foo]" ; - assert.result "foo" : make-VMS "[.foo.bar.-]" ; - assert.result ".." : make-VMS "[.-]" ; - assert.result ".." : make-VMS "[-]" ; - assert.result "." : make-VMS "[]" ; - assert.result "giz.h" : make-VMS "giz.h" ; - assert.result "foo/bar/giz.h" : make-VMS "[.foo.bar]giz.h" ; - assert.result "/disk:/my_docs" : make-VMS "disk:[my_docs]" ; - assert.result "/disk:/boost/tools/build/new/project.jam" : make-VMS - "disk:[boost.tools.build.test.-.new]project.jam" ; - + ## + ## Make portable paths + ## + #assert.result "/disk:" : make-VMS "disk:" ; + #assert.result "foo/bar/giz" : make-VMS "[.foo.bar.giz]" ; + #assert.result "foo" : make-VMS "[.foo]" ; + #assert.result "foo" : make-VMS "[.foo.bar.-]" ; + #assert.result ".." : make-VMS "[.-]" ; + #assert.result ".." : make-VMS "[-]" ; + #assert.result "." 
: make-VMS "[]" ; + #assert.result "giz.h" : make-VMS "giz.h" ; + #assert.result "foo/bar/giz.h" : make-VMS "[.foo.bar]giz.h" ; + #assert.result "/disk:/my_docs" : make-VMS "disk:[my_docs]" ; + #assert.result "/disk:/boost/tools/build/new/project.jam" : make-VMS + # "disk:[boost.tools.build.test.-.new]project.jam" ; # - # Special case (adds '.' to end of file w/o extension to disambiguate from - # directory in portable path spec) + ## + ## Special case (adds '.' to end of file w/o extension to disambiguate from + ## directory in portable path spec) + ## + #assert.result "Jamfile." : make-VMS "Jamfile" ; + #assert.result "dir/Jamfile." : make-VMS "[.dir]Jamfile" ; + #assert.result "/disk:/dir/Jamfile." : make-VMS "disk:[dir]Jamfile" ; # - assert.result "Jamfile." : make-VMS "Jamfile" ; - assert.result "dir/Jamfile." : make-VMS "[.dir]Jamfile" ; - assert.result "/disk:/dir/Jamfile." : make-VMS "disk:[dir]Jamfile" ; - - # - # Make native paths - # - assert.result "disk:" : native-VMS "/disk:" ; - assert.result "[.foo.bar.giz]" : native-VMS "foo/bar/giz" ; - assert.result "[.foo]" : native-VMS "foo" ; - assert.result "[.-]" : native-VMS ".." ; - assert.result "[.foo.-]" : native-VMS "foo/.." ; - assert.result "[]" : native-VMS "." ; - assert.result "disk:[my_docs.work]" : native-VMS "/disk:/my_docs/work" ; - assert.result "giz.h" : native-VMS "giz.h" ; - assert.result "disk:Jamfile." : native-VMS "/disk:Jamfile." ; - assert.result "disk:[my_docs.work]Jamfile." : native-VMS - "/disk:/my_docs/work/Jamfile." ; + ## + ## Make native paths + ## + #assert.result "disk:" : native-VMS "/disk:" ; + #assert.result "[.foo.bar.giz]" : native-VMS "foo/bar/giz" ; + #assert.result "[.foo]" : native-VMS "foo" ; + #assert.result "[.-]" : native-VMS ".." ; + #assert.result "[.foo.-]" : native-VMS "foo/.." ; + #assert.result "[]" : native-VMS "." ; + #assert.result "disk:[my_docs.work]" : native-VMS "/disk:/my_docs/work" ; + #assert.result "giz.h" : native-VMS "giz.h" ; + #assert.result "disk:Jamfile." : native-VMS "/disk:Jamfile." ; + #assert.result "disk:[my_docs.work]Jamfile." : native-VMS + # "/disk:/my_docs/work/Jamfile." ; modules.poke path : os : $(save-os) ; } diff --git a/src/util/path.py b/src/util/path.py index d602598c9..7b9032073 100644 --- a/src/util/path.py +++ b/src/util/path.py @@ -7,13 +7,13 @@ # all copies. This software is provided "as is" without express or implied # warranty, and with no claim as to its suitability for any purpose. -# Performs various path manipulations. Path are always in a 'normilized' +# Performs various path manipulations. Path are always in a 'normilized' # representation. In it, a path may be either: # # - '.', or # # - ['/'] [ ( '..' '/' )* (token '/')* token ] -# +# # In plain english, path can be rooted, '..' elements are allowed only # at the beginning, and it never ends in slash, except for path consisting # of slash only. @@ -40,6 +40,7 @@ def make (native): # TODO: make os selection here. return make_UNIX (native) +@bjam_signature([['native']]) def make_UNIX (native): # VP: I have no idea now 'native' can be empty here! But it can! @@ -60,7 +61,7 @@ def native_UNIX (path): def pwd (): """ Returns the current working directory. - # TODO: is it a good idea to use the current dir? Some use-cases + # TODO: is it a good idea to use the current dir? Some use-cases may not allow us to depend on the current dir. 
""" return make (os.getcwd ()) @@ -79,38 +80,38 @@ def is_rooted (path): # # distribute this software is granted provided this copyright notice appears in # # all copies. This software is provided "as is" without express or implied # # warranty, and with no claim as to its suitability for any purpose. -# -# # Performs various path manipulations. Path are always in a 'normilized' +# +# # Performs various path manipulations. Path are always in a 'normilized' # # representation. In it, a path may be either: # # # # - '.', or # # # # - ['/'] [ ( '..' '/' )* (token '/')* token ] -# # +# # # # In plain english, path can be rooted, '..' elements are allowed only # # at the beginning, and it never ends in slash, except for path consisting # # of slash only. -# +# # import modules ; # import sequence ; # import regex ; # import errors : error ; -# -# +# +# # os = [ modules.peek : OS ] ; -# if [ modules.peek : UNIX ] -# { +# if [ modules.peek : UNIX ] +# { # local uname = [ modules.peek : JAMUNAME ] ; # switch $(uname) # { # case CYGWIN* : # os = CYGWIN ; -# +# # case * : # os = UNIX ; -# } +# } # } -# +# # # # # Tests if a path is rooted. # # @@ -118,7 +119,7 @@ def is_rooted (path): # { # return [ MATCH "^(/)" : $(path) ] ; # } -# +# # # # # Tests if a path has a parent. # # @@ -130,7 +131,7 @@ def is_rooted (path): # return ; # } # } -# +# # # # # Returns the path without any directory components. # # @@ -138,22 +139,22 @@ def is_rooted (path): # { # return [ MATCH "([^/]+)$" : $(path) ] ; # } -# +# # # # # Returns parent directory of the path. If no parent exists, error is issued. # # # rule parent ( path ) # { # if [ has-parent $(path) ] { -# +# # if $(path) = . { # return .. ; # } else { -# +# # # Strip everything at the end of path up to and including # # the last slash # local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ; -# +# # # Did we strip what we shouldn't? # if $(result[2]) = ".." { # return $(path)/.. ; @@ -172,7 +173,7 @@ def is_rooted (path): # error "Path '$(path)' has no parent" ; # } # } -# +# # # # # Returns path2 such that "[ join path path2 ] = .". # # The path may not contain ".." element or be rooted. @@ -231,19 +232,19 @@ def reverse(path): # { # return [ NORMALIZE_PATH $(elements:J="/") ] ; # } -# +# # # # # Contanenates the passed path elements. Generates an error if # # any element other than the first one is rooted. # # # rule join ( elements + ) # { -# if ! $(elements[2]) +# if ! $(elements[2]) # { # return $(elements[1]) ; # } # else -# { +# { # for local e in $(elements[2-]) # { # if [ is-rooted $(e) ] @@ -252,13 +253,13 @@ def reverse(path): # } # } # return [ join-imp $(elements) ] ; -# } +# } # } def glob (dirs, patterns): """ Returns the list of files matching the given pattern in the - specified directory. Both directories and patterns are + specified directory. Both directories and patterns are supplied as portable paths. Each pattern should be non-absolute path, and can't contain "." or ".." elements. Each slash separated element of pattern can contain the following special characters: @@ -266,10 +267,10 @@ def glob (dirs, patterns): - '*', which matches arbitrary number of characters. A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and only if e1 matches p1, e2 matches p2 and so on. - - For example: - [ glob . : *.cpp ] - [ glob . : */build/Jamfile ] + + For example: + [ glob . : *.cpp ] + [ glob . 
: */build/Jamfile ] """ # { # local result ; @@ -282,43 +283,43 @@ def glob (dirs, patterns): # # First glob for directory part. # local globbed-dirs = [ glob $(dirs) : $(p:D) ] ; # result += [ glob $(globbed-dirs) : $(p:D="") ] ; -# } +# } # } # else -# { +# { # # When a pattern has not directory, we glob directly. # # Take care of special ".." value. The "GLOB" rule simply ignores # # the ".." element (and ".") element in directory listings. This is -# # needed so that +# # needed so that # # # # [ glob libs/*/Jamfile ] # # -# # don't return +# # don't return # # # # libs/../Jamfile (which is the same as ./Jamfile) # # # # On the other hand, when ".." is explicitly present in the pattern # # we need to return it. -# # +# # # for local dir in $(dirs) # { # for local p in $(patterns) -# { +# { # if $(p) != ".." -# { -# result += [ sequence.transform make +# { +# result += [ sequence.transform make # : [ GLOB [ native $(dir) ] : $(p) ] ] ; -# } +# } # else # { # result += [ path.join $(dir) .. ] ; -# } -# } +# } +# } # } -# } +# } # return $(result) ; # } -# +# # TODO: (PF) I replaced the code above by this. I think it should work but needs to be tested. result = [] @@ -335,7 +336,7 @@ def glob (dirs, patterns): import glob result.extend (glob.glob (p)) return result - + # # Find out the absolute name of path and returns the list of all the parents, # starting with the immediate one. Parents are returned as relative names. @@ -354,7 +355,7 @@ def all_parents(path, upper_limit=None, cwd=None): result = [] while path_abs and path_abs != upper_limit: (head, tail) = os.path.split(path) - path = os.path.join(path, "..") + path = os.path.join(path, "..") result.append(path) path_abs = head @@ -362,7 +363,7 @@ def all_parents(path, upper_limit=None, cwd=None): raise BaseException("'%s' is not a prefix of '%s'" % (upper_limit, path)) return result - + # Search for 'pattern' in parent directories of 'dir', up till and including # 'upper_limit', if it is specified, or till the filesystem root otherwise. # @@ -377,22 +378,22 @@ def glob_in_parents(dir, patterns, upper_limit=None): return result -# +# # # # # Assuming 'child' is a subdirectory of 'parent', return the relative # # path from 'parent' to 'child' # # # rule relative ( child parent ) # { -# if $(parent) = "." +# if $(parent) = "." # { # return $(child) ; # } -# else -# { +# else +# { # local split1 = [ regex.split $(parent) / ] ; # local split2 = [ regex.split $(child) / ] ; -# +# # while $(split1) # { # if $(split1[1]) = $(split2[1]) @@ -403,12 +404,12 @@ def glob_in_parents(dir, patterns, upper_limit=None): # else # { # errors.error $(child) is not a subdir of $(parent) ; -# } -# } -# return [ join $(split2) ] ; -# } +# } +# } +# return [ join $(split2) ] ; +# } # } -# +# # # Returns the minimal path to path2 that is relative path1. 
# # # rule relative-to ( path1 path2 ) @@ -416,7 +417,7 @@ def glob_in_parents(dir, patterns, upper_limit=None): # local root_1 = [ regex.split [ reverse $(path1) ] / ] ; # local split1 = [ regex.split $(path1) / ] ; # local split2 = [ regex.split $(path2) / ] ; -# +# # while $(split1) && $(root_1) # { # if $(split1[1]) = $(split2[1]) @@ -438,10 +439,10 @@ def glob_in_parents(dir, patterns, upper_limit=None): def programs_path (): raw = [] names = ['PATH', 'Path', 'path'] - + for name in names: raw.append(os.environ.get (name, '')) - + result = [] for elem in raw: if elem: @@ -458,40 +459,40 @@ def programs_path (): # { # local tokens = [ regex.split $(native) "[/\\]" ] ; # local result ; -# +# # # Handle paths ending with slashes # if $(tokens[-1]) = "" # { # tokens = $(tokens[1--2]) ; # discard the empty element # } -# +# # result = [ path.join $(tokens) ] ; -# +# # if [ regex.match "(^.:)" : $(native) ] # { # result = /$(result) ; # } -# +# # if $(native) = "" # { # result = "." ; # } -# +# # return $(result) ; # } -# +# # rule native-NT ( path ) # { # local result = [ MATCH "^/?(.*)" : $(path) ] ; # result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ; # return $(result) ; # } -# +# # rule make-CYGWIN ( path ) # { # return [ make-NT $(path) ] ; # } -# +# # rule native-CYGWIN ( path ) # { # local result = $(path) ; @@ -501,7 +502,7 @@ def programs_path (): # } # return [ native-UNIX $(result) ] ; # } -# +# # # # # split-VMS: splits input native path into # # device dir file (each part is optional), @@ -515,10 +516,10 @@ def programs_path (): # local device = $(matches[1]) ; # local dir = $(matches[2]) ; # local file = $(matches[3]) ; -# +# # return $(device) $(dir) $(file) ; # } -# +# # # # # Converts a native VMS path into a portable path spec. # # @@ -535,13 +536,13 @@ def programs_path (): # { # errors.error "Can't handle default-device absolute paths: " $(native) ; # } -# +# # local parts = [ split-path-VMS $(native) ] ; # local device = $(parts[1]) ; # local dir = $(parts[2]) ; # local file = $(parts[3]) ; # local elems ; -# +# # if $(device) # { # # @@ -549,7 +550,7 @@ def programs_path (): # # # elems = /$(device) ; # } -# +# # if $(dir) = "[]" # { # # @@ -561,7 +562,7 @@ def programs_path (): # { # dir = [ regex.replace $(dir) "\\[|\\]" "" ] ; # local dir_parts = [ regex.split $(dir) \\. ] ; -# +# # if $(dir_parts[1]) = "" # { # # @@ -569,15 +570,15 @@ def programs_path (): # # # dir_parts = $(dir_parts[2--1]) ; # } -# +# # # # # replace "parent-directory" parts (- => ..) # # # dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ; -# +# # elems = $(elems) $(dir_parts) ; # } -# +# # if $(file) # { # if ! [ MATCH (\\.) : $(file) ] @@ -589,12 +590,12 @@ def programs_path (): # } # elems = $(elems) $(file) ; # } -# +# # local portable = [ path.join $(elems) ] ; -# +# # return $(portable) ; # } -# +# # # # # Converts a portable path spec into a native VMS path. # # @@ -608,7 +609,7 @@ def programs_path (): # local file = "" ; # local native ; # local split ; -# +# # # # # Has device ? # # @@ -618,7 +619,7 @@ def programs_path (): # device = $(split[1]) ; # dir = $(split[2]) ; # } -# +# # # # # Has file ? # # @@ -631,13 +632,13 @@ def programs_path (): # # # split = [ regex.split $(dir) / ] ; # local maybe_file = $(split[-1]) ; -# +# # if [ MATCH ^([^.]+\\..*) : $(maybe_file) ] # { # file = $(maybe_file) ; # dir = [ sequence.join $(split[1--2]) : / ] ; # } -# +# # # # # Has dir spec ? 
# # @@ -649,59 +650,59 @@ def programs_path (): # { # dir = [ regex.replace $(dir) \\.\\. - ] ; # dir = [ regex.replace $(dir) / . ] ; -# +# # if $(device) = "" # { # # # # Relative directory -# # +# # # dir = "."$(dir) ; # } # dir = "["$(dir)"]" ; # } -# +# # native = [ sequence.join $(device) $(dir) $(file) ] ; -# +# # return $(native) ; # } -# -# +# +# # rule __test__ ( ) { -# +# # import assert ; # import errors : try catch ; -# +# # assert.true is-rooted "/" ; # assert.true is-rooted "/foo" ; # assert.true is-rooted "/foo/bar" ; # assert.result : is-rooted "." ; # assert.result : is-rooted "foo" ; # assert.result : is-rooted "foo/bar" ; -# +# # assert.true has-parent "foo" ; # assert.true has-parent "foo/bar" ; # assert.true has-parent "." ; # assert.result : has-parent "/" ; -# +# # assert.result "." : basename "." ; # assert.result ".." : basename ".." ; # assert.result "foo" : basename "foo" ; # assert.result "foo" : basename "bar/foo" ; # assert.result "foo" : basename "gaz/bar/foo" ; # assert.result "foo" : basename "/gaz/bar/foo" ; -# +# # assert.result "." : parent "foo" ; # assert.result "/" : parent "/foo" ; # assert.result "foo/bar" : parent "foo/bar/giz" ; # assert.result ".." : parent "." ; # assert.result ".." : parent "../foo" ; # assert.result "../../foo" : parent "../../foo/bar" ; -# -# +# +# # assert.result "." : reverse "." ; # assert.result ".." : reverse "foo" ; # assert.result "../../.." : reverse "foo/bar/giz" ; -# +# # assert.result "foo" : join "foo" ; # assert.result "/foo" : join "/" "foo" ; # assert.result "foo/bar" : join "foo" "bar" ; @@ -714,57 +715,57 @@ def programs_path (): # assert.result "foo/giz" : join "foo/giz" "." ; # assert.result "." : join lib2 ".." ; # assert.result "/" : join "/a" ".." ; -# +# # assert.result /a/b : join /a/b/c .. ; -# +# # assert.result "foo/bar/giz" : join "foo" "bar" "giz" ; # assert.result "giz" : join "foo" ".." "giz" ; # assert.result "foo/giz" : join "foo" "." "giz" ; -# +# # try ; # { # join "a" "/b" ; # } # catch only first element may be rooted ; -# +# # local CWD = "/home/ghost/build" ; # assert.result : all-parents . : . : $(CWD) ; # assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ; # assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ; # assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ; -# +# # local CWD = "/home/ghost" ; # assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ; # assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ; -# +# # assert.result "c/d" : relative "a/b/c/d" "a/b" ; # assert.result "foo" : relative "foo" "." ; -# +# # local save-os = [ modules.peek path : os ] ; # modules.poke path : os : NT ; -# +# # assert.result "foo/bar/giz" : make "foo/bar/giz" ; # assert.result "foo/bar/giz" : make "foo\\bar\\giz" ; # assert.result "foo" : make "foo/." ; # assert.result "foo" : make "foo/bar/.." ; # assert.result "/D:/My Documents" : make "D:\\My Documents" ; # assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ; -# +# # assert.result "foo\\bar\\giz" : native "foo/bar/giz" ; # assert.result "foo" : native "foo" ; # assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ; -# +# # modules.poke path : os : UNIX ; -# +# # assert.result "foo/bar/giz" : make "foo/bar/giz" ; # assert.result "/sub1" : make "/sub1/." ; -# assert.result "/sub1" : make "/sub1/sub2/.." ; +# assert.result "/sub1" : make "/sub1/sub2/.." 
; # assert.result "sub1" : make "sub1/." ; # assert.result "sub1" : make "sub1/sub2/.." ; # assert.result "/foo/bar" : native "/foo/bar" ; -# +# # modules.poke path : os : VMS ; -# +# # # # # Don't really need to poke os before these # # @@ -776,7 +777,7 @@ def programs_path (): # assert.result "" "[dir]" "" : split-path-VMS "[dir]" ; # assert.result "" "" "file" : split-path-VMS "file" ; # assert.result "" "" "" : split-path-VMS "" ; -# +# # # # # Special case: current directory # # @@ -784,7 +785,7 @@ def programs_path (): # assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ; # assert.result "" "[]" "file" : split-path-VMS "[]file" ; # assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ; -# +# # # # # Make portable paths # # @@ -799,7 +800,7 @@ def programs_path (): # assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ; # assert.result "/disk:/my_docs" : make "disk:[my_docs]" ; # assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ; -# +# # # # # Special case (adds '.' to end of file w/o extension to # # disambiguate from directory in portable path spec). @@ -807,7 +808,7 @@ def programs_path (): # assert.result "Jamfile." : make "Jamfile" ; # assert.result "dir/Jamfile." : make "[.dir]Jamfile" ; # assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ; -# +# # # # # Make native paths # # @@ -821,9 +822,9 @@ def programs_path (): # assert.result "giz.h" : native "giz.h" ; # assert.result "disk:Jamfile." : native "/disk:Jamfile." ; # assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ; -# +# # modules.poke path : os : $(save-os) ; -# +# # } # @@ -837,7 +838,7 @@ def programs_path (): def glob(dirs, patterns, exclude_patterns=None): """Returns the list of files matching the given pattern in the - specified directory. Both directories and patterns are + specified directory. Both directories and patterns are supplied as portable paths. Each pattern should be non-absolute path, and can't contain '.' or '..' elements. Each slash separated element of pattern can contain the following special characters: @@ -845,8 +846,8 @@ def glob(dirs, patterns, exclude_patterns=None): - '*', which matches arbitrary number of characters. A file $(d)/e1/e2/e3 (where 'd' is in $(dirs)) matches pattern p1/p2/p3 if and only if e1 matches p1, e2 matches p2 and so on. - For example: - [ glob . : *.cpp ] + For example: + [ glob . : *.cpp ] [ glob . : */build/Jamfile ] """ @@ -858,7 +859,7 @@ def glob(dirs, patterns, exclude_patterns=None): else: assert(isinstance(exclude_patterns, list)) - real_patterns = [os.path.join(d, p) for p in patterns for d in dirs] + real_patterns = [os.path.join(d, p) for p in patterns for d in dirs] real_exclude_patterns = [os.path.join(d, p) for p in exclude_patterns for d in dirs] @@ -883,14 +884,14 @@ def glob_tree(roots, patterns, exclude_patterns=None): subdirs = [s for s in glob(roots, ["*"]) if s != "." and s != ".." and os.path.isdir(s)] if subdirs: result.extend(glob_tree(subdirs, patterns, exclude_patterns)) - + return result def glob_in_parents(dir, patterns, upper_limit=None): """Recursive version of GLOB which glob sall parent directories of dir until the first match is found. 
Returns an empty result if no match - is found""" - + is found""" + assert(isinstance(dir, str)) assert(isinstance(patterns, list)) diff --git a/src/util/print.jam b/src/util/print.jam index c867e4e1b..8a9e5b4c8 100644 --- a/src/util/print.jam +++ b/src/util/print.jam @@ -15,6 +15,7 @@ import regex ; import "class" ; import scanner ; import path ; +import os ; # The current output target. Defaults to console. output-target = console ; @@ -25,6 +26,14 @@ output-type = plain ; # Whitespace. .whitespace = [ string.whitespace ] ; +# Redirect +.redirect-out = ">" ; +.redirect-append = ">>" ; +if [ os.name ] = VMS +{ + .redirect-out = "| TYPE SYS$INPUT /OUT=" ; + .redirect-append = "| APPEND/NEW SYS$INPUT " ; +} # Set the target and type of output to generate. This sets both the destination # output and the type of docs to generate to that output. The target can be @@ -337,10 +346,10 @@ rule text ( nl on $(output-target) = " " ; - text-redirect on $(output-target) = ">>" ; + text-redirect on $(output-target) = $(.redirect-append) ; if $(overwrite) { - text-redirect on $(output-target) = ">" ; + text-redirect on $(output-target) = $(.redirect-out) ; } text-content on $(output-target) = ; @@ -402,6 +411,13 @@ actions quietly text-action @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) "$(<)" } +if [ os.name ] = VMS +{ + actions quietly text-action + { + @($(STDOUT):E=$(text-content:J=$(nl))) $(text-redirect) $(<:W) + } +} rule get-scanner ( ) { @@ -460,6 +476,10 @@ class print-scanner : scanner { actual-content = [ SHELL "type \"$(file)\" 2>nul" ] ; } + else if [ os.name ] = VMS + { + actual-content = [ SHELL "PIPE TYPE $(file:W) 2>NL:" ] ; + } else { actual-content = [ SHELL "cat \"$(file)\" 2>/dev/null" ] ; diff --git a/src/util/sequence.py b/src/util/sequence.py index 1d32efd2e..b5dddbade 100644 --- a/src/util/sequence.py +++ b/src/util/sequence.py @@ -5,7 +5,11 @@ import operator +from b2.util import is_iterable + + def unique (values, stable=False): + assert is_iterable(values) if stable: s = set() r = [] @@ -21,6 +25,8 @@ def max_element (elements, ordered = None): """ Returns the maximum number in 'elements'. Uses 'ordered' for comparisons, or '<' is none is provided. """ + assert is_iterable(elements) + assert callable(ordered) or ordered is None if not ordered: ordered = operator.lt max = elements [0] @@ -34,6 +40,8 @@ def select_highest_ranked (elements, ranks): """ Returns all of 'elements' for which corresponding element in parallel list 'rank' is equal to the maximum value in 'rank'. """ + assert is_iterable(elements) + assert is_iterable(ranks) if not elements: return [] diff --git a/src/util/set.py b/src/util/set.py index dc7cf3282..f2239a021 100644 --- a/src/util/set.py +++ b/src/util/set.py @@ -3,11 +3,15 @@ # all copies. This software is provided "as is" without express or implied # warranty, and with no claim as to its suitability for any purpose. -from utility import to_seq +from b2.util import is_iterable +from .utility import to_seq + def difference (b, a): """ Returns the elements of B that are not in A. """ + assert is_iterable(b) + assert is_iterable(a) result = [] for element in b: if not element in a: @@ -18,6 +22,8 @@ def difference (b, a): def intersection (set1, set2): """ Removes from set1 any items which don't appear in set2 and returns the result. 
""" + assert is_iterable(set1) + assert is_iterable(set2) result = [] for v in set1: if v in set2: @@ -39,4 +45,6 @@ def equal (a, b): """ Returns True iff 'a' contains the same elements as 'b', irrespective of their order. # TODO: Python 2.4 has a proper set class. """ + assert is_iterable(a) + assert is_iterable(b) return contains (a, b) and contains (b, a) diff --git a/src/util/utility.py b/src/util/utility.py index afea765b9..162a57be4 100644 --- a/src/util/utility.py +++ b/src/util/utility.py @@ -11,6 +11,7 @@ import re import os import bjam from b2.exceptions import * +from b2.util import is_iterable_typed __re_grist_and_value = re.compile (r'(<[^>]*>)(.*)') __re_grist_content = re.compile ('^<(.*)>$') @@ -40,13 +41,13 @@ def add_grist (features): features: one string or a sequence of strings return: the gristed string, if features is a string, or a sequence of gristed strings, if features is a sequence """ - + assert is_iterable_typed(features, basestring) or isinstance(features, basestring) def grist_one (feature): if feature [0] != '<' and feature [len (feature) - 1] != '>': return '<' + feature + '>' else: return feature - + if isinstance (features, str): return grist_one (features) else: @@ -56,6 +57,8 @@ def replace_grist (features, new_grist): """ Replaces the grist of a string by a new one. Returns the string with the new grist. """ + assert is_iterable_typed(features, basestring) or isinstance(features, basestring) + assert isinstance(new_grist, basestring) def replace_grist_one (name, new_grist): split = __re_grist_and_value.match (name) if not split: @@ -71,12 +74,14 @@ def replace_grist (features, new_grist): def get_value (property): """ Gets the value of a property, that is, the part following the grist, if any. """ + assert is_iterable_typed(property, basestring) or isinstance(property, basestring) return replace_grist (property, '') - + def get_grist (value): """ Returns the grist of a string. If value is a sequence, does it for every value and returns the result as a sequence. """ + assert is_iterable_typed(value, basestring) or isinstance(value, basestring) def get_grist_one (name): split = __re_grist_and_value.match (name) if not split: @@ -90,9 +95,10 @@ def get_grist (value): return [ get_grist_one (v) for v in value ] def ungrist (value): - """ Returns the value without grist. + """ Returns the value without grist. If value is a sequence, does it for every value and returns the result as a sequence. """ + assert is_iterable_typed(value, basestring) or isinstance(value, basestring) def ungrist_one (value): stripped = __re_grist_content.match (value) if not stripped: @@ -109,12 +115,15 @@ def replace_suffix (name, new_suffix): """ Replaces the suffix of name by new_suffix. If no suffix exists, the new one is added. """ + assert isinstance(name, basestring) + assert isinstance(new_suffix, basestring) split = os.path.splitext (name) return split [0] + new_suffix def forward_slashes (s): """ Converts all backslashes to forward slashes. """ + assert isinstance(s, basestring) return __re_backslash.sub ('/', s) @@ -122,6 +131,7 @@ def split_action_id (id): """ Splits an id in the toolset and specific rule parts. E.g. 
'gcc.compile.c++' returns ('gcc', 'compile.c++') """ + assert isinstance(id, basestring) split = id.split ('.', 1) toolset = split [0] name = '' @@ -136,7 +146,7 @@ def os_name (): def platform (): return bjam.variable("OSPLAT") - + def os_version (): return bjam.variable("OSVER") diff --git a/test/BoostBuild.py b/test/BoostBuild.py index 540830e34..8c51cf05f 100644 --- a/test/BoostBuild.py +++ b/test/BoostBuild.py @@ -253,6 +253,8 @@ class Tester(TestCmd.TestCmd): elif os.uname()[0] == "Darwin": if os.uname()[4] == "i386": jam_build_dir = "bin.macosxx86" + elif os.uname()[4] == "x86_64": + jam_build_dir = "bin.macosxx86_64" else: jam_build_dir = "bin.macosxppc" elif os.uname()[0] == "AIX": diff --git a/test/alias.py b/test/alias.py index 4ff4d74d5..7ac4c6109 100644 --- a/test/alias.py +++ b/test/alias.py @@ -67,7 +67,7 @@ def test_alias_source_usage_requirements(t): Check whether usage requirements are propagated via "alias". In case they are not, linking will fail as there will be no main() function defined anywhere in the source. - + """ t.write("jamroot.jam", """\ lib l : l.cpp : : : WANT_MAIN ; diff --git a/test/builtin_glob_archive.py b/test/builtin_glob_archive.py new file mode 100644 index 000000000..d9eed3aef --- /dev/null +++ b/test/builtin_glob_archive.py @@ -0,0 +1,214 @@ +#!/usr/bin/python + +# Copyright 2014 Steven Watanabe +# Copyright 2015 Artur Shepilko +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +# This tests the GLOB_ARCHIVE rule. + +import os +import sys +import StringIO +import BoostBuild + +vms = ( os.name == 'posix' and sys.platform == 'OpenVMS') + +t = BoostBuild.Tester() + +## Setup test archive sources and symbols they contain. 
+sources = { + "a.cpp" : ["a"], + "b.cpp" : ["b"], + "b_match.cpp" : ["b_match"], + "c/nopath_check.cpp" : ["nopath_check"], + "CaseCheck.cpp" : ["CaseCheck"], + "seq_check1.cpp" : ["seq_check1"], + "seq_check2.cpp" : ["seq_check2"], + "seq_check3.cpp" : ["seq_check3"], + "symbols_check.c" : ["symbol", "symbol_match"], + "members_and_symbols_check.c" : ["member_and_symbol_match"], + "symbol_case_check.c" : ["SymbolCaseCheck"], + "main_check.cpp" : ["main"] +} + + +def create_sources(path, sources): + for s in sources : + f = os.path.join(path, s) + t.write(f, "") + output = StringIO.StringIO() + for sym in sources[s] : + output.write("int %s() { return 0; }\n" % sym) + t.write(f, output.getvalue()) + + +def setup_archive(name, sources): + global archive + global obj_suffix + archive = t.adjust_names(name)[0] + obj_suffix = t.adjust_names(".obj")[0] + output = StringIO.StringIO() + t.write("jamroot.jam","") + output.write("""\ +static-lib %s : +""" % name.split(".")[0]) + ## sort the sources, so we can test order of the globbed members + for s in sorted(sources) : + output.write("""\ + %s +""" % s) + output.write("""\ + ; +""") + t.write("lib/jamfile.jam", output.getvalue()) + create_sources("lib", sources) + t.run_build_system(subdir="lib") + built_archive = "lib/bin/$toolset/debug/%s" % name + t.expect_addition(built_archive) + t.copy(built_archive, name) + t.rm("lib") + + +def test_glob_archive(archives, glob, expected, sort_results = False): + output = StringIO.StringIO() + ## replace placeholders + glob = glob.replace("$archive1", archives[0]).replace("$obj", obj_suffix) + expected = [ m.replace("$archive1", + archives[0]).replace("$obj", obj_suffix) for m in expected ] + if len(archives) > 1 : + glob = glob.replace("$archive2", archives[1]).replace("$obj", obj_suffix) + expected = [ m.replace("$archive2", + archives[1]).replace("$obj", obj_suffix) for m in expected ] + ## create test jamfile + if sort_results : glob = "[ SORT %s ]" % glob + output.write("""\ + for local p in %s + { + ECHO $(p) ; + } + UPDATE ; + """ % glob) + t.write("file.jam", output.getvalue()) + ## run test jamfile and match against expected results + if sort_results : expected.sort() + t.run_build_system(["-ffile.jam"], stdout="\n".join(expected + [""])) + t.rm("file.jam") + + +## RUN TESTS +setup_archive("auxilliary1.lib", sources) +archive1 = archive +setup_archive("auxilliary2.lib", sources) +archive2 = archive + +## all arguments empty +test_glob_archive([archive1], "[ GLOB_ARCHIVE ]", []) + +## empty query +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : ]", []) + +## no-match +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : a ]", []) + +## match exact +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : a$obj ]", + ["$archive1(a$obj)"]) + +## glob wildcards:1 +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : b.* ]", + ["$archive1(b$obj)"]) + +## glob wildcards:2 +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : \\b?match[\.]* ]", + ["$archive1(b_match$obj)"]) + +## glob wildcards:3 +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : b* ]", + ["$archive1(b$obj)", "$archive1(b_match$obj)"]) + +## glob multiple patterns with multiple results. +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : b.* b_* ]", + ["$archive1(b$obj)", "$archive1(b_match$obj)"]) + +## glob multiple archives and patterns. 
+test_glob_archive([archive1, archive2], + "[ GLOB_ARCHIVE $archive1 $archive2 : b.* b_* ]", + ["$archive1(b$obj)", "$archive1(b_match$obj)", + "$archive2(b$obj)", "$archive2(b_match$obj)"]) + +## glob same archive multiple times. +test_glob_archive([archive1, archive1], + "[ GLOB_ARCHIVE $archive1 $archive2 $archive1 : b.* ]", + ["$archive1(b$obj)", "$archive2(b$obj)", "$archive1(b$obj)"]) + +## returned archive member has no path, even though its source object-file did. +## this is rather NT-specific, where members also store their object-file's path. +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : nopath_check$obj ]", + ["$archive1(nopath_check$obj)"]) + +## case insensitive matching, when archives support case sensitive member names. +## VMS implementation forces case-insensitive matching and downcased member names. + +case_sensitive_members = ( not vms ) + +if case_sensitive_members: + test_glob_archive([archive1], + "[ GLOB_ARCHIVE $archive1 : casecheck$obj : true ]", + ["$archive1(CaseCheck$obj)"]) +elif vms: + test_glob_archive([archive1], + "[ GLOB_ARCHIVE $archive1 : CaseCheck$obj : false ]", + ["$archive1(casecheck$obj)"]) + + +## test the order of matched members, in general it should match the +## insertion sequence. +test_glob_archive([archive1], "[ GLOB_ARCHIVE $archive1 : seq_check*$obj ]", + ["$archive1(seq_check1$obj)", "$archive1(seq_check2$obj)", + "$archive1(seq_check3$obj)"]) + + +## glob members by symbols they contain. +## Currently supported only on VMS. +symbol_glob_supported = ( vms ) + +if symbol_glob_supported : + ## NOTE: generated symbols are compiler-dependent and may be specifically + ## mangled (as in C++ case), so globbing by exact symbol is non-trivial. + ## However, C-generated symbols are likely to have more portable names, + ## so for the glob-by-symbol tests we glob C-generated archive members. + + ## glob members by exact symbol. + test_glob_archive([archive1], + "[ GLOB_ARCHIVE $archive1 : : : symbol ]", + ["$archive1(symbols_check$obj)"]) + + ## glob members by symbol wildcard. + test_glob_archive([archive1], + "[ GLOB_ARCHIVE $archive1 : : : symbol_* ]", + ["$archive1(symbols_check$obj)"]) + + ## glob members by member pattern AND symbol pattern. + test_glob_archive([archive1], + "[ GLOB_ARCHIVE $archive1 : *symbol* : : *member* ]", + ["$archive1(members_and_symbols_check$obj)"]) + + ## case insensitive symbol glob. + test_glob_archive([archive1], + "[ GLOB_ARCHIVE $archive1 : : true : symbolcasecheck ]", + ["$archive1(symbol_case_check$obj)"]) + + ## glob member that contains main symbol. + test_glob_archive([archive1], + "[ GLOB_ARCHIVE $archive1 : : : main _main ]", + ["$archive1(main_check$obj)"]) + +else: + test_glob_archive([archive1], + "[ GLOB_ARCHIVE $archive1 : : : symbol ]", + []) + + +t.cleanup() + diff --git a/test/bzip2.py b/test/bzip2.py new file mode 100755 index 000000000..b966eb718 --- /dev/null +++ b/test/bzip2.py @@ -0,0 +1,119 @@ +#!/usr/bin/python + +# Copyright (C) 2013 Steven Watanabe +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or copy at +# http://www.boost.org/LICENSE_1_0.txt) + +import BoostBuild +import MockToolset + +t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0) + +MockToolset.create(t) + +# Build from source +t.write("bzip2/bzlib.h", 'bzip2') +t.write("bzip2/blocksort.c", 'blocksort') + +t.write("Jamroot.jam", """ +path-constant here : . 
; +using bzip2 : : $(here)/bzip2 ; +alias bzip2 : /bzip2//bzip2 : : static shared ; +""") + +MockToolset.set_expected(t, ''' +source_file('blocksort.c', 'blocksort') +action('-c -x c -I./bzip2 -o $blocksort.o $blocksort.c') +action('--dll $blocksort.o -o $bz2.so') +action('--archive $blocksort.o -o $bz2.a') +''') + +t.run_build_system() +t.expect_addition('bin/standalone/bzip2/mock/debug/bz2.dll') +t.expect_addition('bin/standalone/bzip2/mock/debug/link-static/bz2.lib') + +t.rm('bzip2') + +# Generic definitions that aren't configuration specific +common_stuff = ''' +source_file('test.cpp', 'test.cpp') +source_file('main.cpp', 'int main() {}') +source_file('bzlib.h.cpp', '#include ') +action('-c -x c++ $main.cpp -o $main.o') +''' +t.write('test.cpp', 'test.cpp') + +# Default initialization - static library +t.rm('bin') +t.write("Jamroot.jam", """ +path-constant here : . ; +using bzip2 ; +exe test : test.cpp /bzip2//bzip2 : : static shared ; +""") + +MockToolset.set_expected(t, common_stuff + ''' +action('$main.o --static-lib=bz2 -o $config.exe') +action('-c -x c++ $bzlib.h.cpp -o $bzlib.h.o') +action('-c -x c++ $test.cpp -o $test.o') +action('$test.o --static-lib=bz2 -o $test') +''') +t.run_build_system() +t.expect_addition('bin/mock/debug/test.exe') +t.expect_addition('bin/mock/debug/link-static/test.exe') + +# Default initialization - shared library +t.rm('bin') +t.write("Jamroot.jam", """ +path-constant here : . ; +using bzip2 ; +exe test : test.cpp /bzip2//bzip2 : : static shared ; +""") + +MockToolset.set_expected(t, common_stuff + ''' +action('$main.o --shared-lib=bz2 -o $config.exe') +action('-c -x c++ $bzlib.h.cpp -o $bzlib.h.o') +action('-c -x c++ $test.cpp -o $test.o') +action('$test.o --shared-lib=bz2 -o $test') +''') +t.run_build_system() +t.expect_addition('bin/mock/debug/test.exe') +t.expect_addition('bin/mock/debug/link-static/test.exe') + +# Initialization in explicit location - static library +t.rm('bin') +t.write("Jamroot.jam", """ +path-constant here : . ; +using bzip2 : : mybzlib $(here)/bzip2 $(here)/bzip2 ; +exe test : test.cpp /bzip2//bzip2 : : static shared ; +""") + +t.write('bzip2/bzlib.h', 'bzip2') + +MockToolset.set_expected(t, common_stuff + ''' +action('$main.o -L./bzip2 --static-lib=mybzlib -o $config.exe') +action('-c -x c++ $test.cpp -I./bzip2 -o $test.o') +action('$test.o -L./bzip2 --static-lib=mybzlib -o $test') +''') +t.run_build_system() +t.expect_addition('bin/mock/debug/test.exe') +t.expect_addition('bin/mock/debug/link-static/test.exe') + +# Initialization in explicit location - shared library +t.rm('bin') +t.write("Jamroot.jam", """ +path-constant here : . ; +using bzip2 : : mybzlib $(here)/bzip2 $(here)/bzip2 ; +exe test : test.cpp /bzip2//bzip2 : : static shared ; +""") + +MockToolset.set_expected(t, common_stuff + ''' +action('$main.o -L./bzip2 --shared-lib=mybzlib -o $config.exe') +action('-c -x c++ $test.cpp -I./bzip2 -o $test.o') +action('$test.o -L./bzip2 --shared-lib=mybzlib -o $test') +''') +t.run_build_system() +t.expect_addition('bin/mock/debug/test.exe') +t.expect_addition('bin/mock/debug/link-static/test.exe') + +t.cleanup() diff --git a/test/cli_property_expansion.py b/test/cli_property_expansion.py new file mode 100644 index 000000000..24c821617 --- /dev/null +++ b/test/cli_property_expansion.py @@ -0,0 +1,41 @@ +#!/usr/bin/python + +# Copyright 2015 Aaron Boman +# Distributed under the Boost Software License, Version 1.0. 
+# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) + +# Test that free property inside. + +import BoostBuild + +t = BoostBuild.Tester(use_test_config=False) + +t.write("jamroot.jam", "") +t.write( + "subdir/build.jam", + """ + import feature ; + feature.feature my-feature : : free ; + """ +) +t.write( + "subdir/subsubdir/build.jam", + """ + exe hello : hello.c ; + """ +) +t.write( + "subdir/subsubdir/hello.c", + r""" + #include + + int main(int argc, char **argv){ + printf("%s\n", "Hello, World!"); + } + """ +) + +# run from the root directory +t.run_build_system(['subdir/subsubdir', 'my-feature="some value"']) + +t.cleanup() diff --git a/test/composite.py b/test/composite.py index 064c4087d..a35b88d1a 100644 --- a/test/composite.py +++ b/test/composite.py @@ -1,8 +1,8 @@ #!/usr/bin/python -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # Test that composite properties are handled correctly. diff --git a/test/core-language/test.jam b/test/core-language/test.jam index 4198dd720..2853c736d 100644 --- a/test/core-language/test.jam +++ b/test/core-language/test.jam @@ -317,7 +317,7 @@ rule call-foreach ( values * ) } } -check-equal foreach-result : [ call-foreach 1 2 3 ] : ; +check-equal foreach-result : [ call-foreach 1 2 3 ] : 1 ; result = ; local varname = x ; @@ -406,7 +406,7 @@ rule test-rule } } -check-equal if-false-result : [ test-rule ] : ; +check-equal if-false-result : [ test-rule ] : result ; rule test-rule { @@ -1203,7 +1203,147 @@ rule test-rule } } -check-equal while-result-2 : [ test-rule ] : ; +check-equal while-result-2 : [ test-rule ] : x ; + +} + + +# +# test break +# + +{ + +local z = original ; +local done ; +while ! $(done) +{ + local z = inner ; + mark-order r1 ; + break ; + mark-order r2 ; + done = true ; +} + +check-order break-while-exec : r1 ; +check-equal break-while-cleanup : $(z) : original ; + +local values = v1 v2 ; + +for y in $(values) +{ + local z = inner ; + mark-order r1-$(y) ; + break ; + mark-order r2-$(y) ; +} + +check-order break-for-exec : r1-v1 ; +check-equal break-for-cleanup : $(z) : original ; + +for local y in $(values) +{ + local z = inner ; + mark-order r1-$(y) ; + break ; + mark-order r2-$(y) ; +} + +check-order break-for-local-exec : r1-v1 ; +check-equal break-for-local-cleanup : $(z) : original ; + +local z1 = z1val ; +local z2 = z2val ; +done = ; +while ! $(done) +{ + local z1 = z1new ; + mark-order r1 ; + for local y in $(values) + { + local z2 = z2new ; + mark-order r2 ; + break ; + mark-order r3 ; + } + mark-order r4 ; + break ; + mark-order r5 ; + done = true ; +} + +check-order break-nested-exec : r1 r2 r4 ; +check-equal break-nested-cleanup1 : $(z1) : z1val ; +check-equal break-nested-cleanup2 : $(z2) : z2val ; + +} + +# +# test continue +# + +{ + +local z = original ; +local done ; +while ! 
[ mark-order r1 : $(done) ] +{ + local z = inner ; + done = true ; + mark-order r2 ; + continue ; + mark-order r3 ; +} + +check-order continue-while-exec : r1 r2 r1 ; +check-equal continue-while-cleanup : $(z) : original ; + +local values = v1 v2 ; +for y in $(values) +{ + local z = inner ; + mark-order r1-$(y) ; + continue ; + mark-order r2-$(y) ; +} + +check-order continue-for-exec : r1-v1 r1-v2 ; +check-equal continue-for-cleanup : $(z) : original ; + +for local y in $(values) +{ + local z = inner ; + mark-order r1-$(y) ; + continue ; + mark-order r2-$(y) ; +} + +check-order continue-for-local-exec : r1-v1 r1-v2 ; +check-equal continue-for-local-cleanup : $(z) : original ; + +local z1 = z1val ; +local z2 = z2val ; +done = ; +while ! [ mark-order r1 : $(done) ] +{ + local z1 = z1new ; + done = true ; + mark-order r2 ; + for local y in $(values) + { + local z2 = z2new ; + mark-order r3-$(y) ; + continue ; + mark-order r4-$(y) ; + } + mark-order r5 ; + continue ; + mark-order r6 ; +} + +check-order continue-nested-exec : r1 r2 r3-v1 r3-v2 r5 r1 ; +check-equal continue-nested-cleanup1 : $(z1) : z1val ; +check-equal continue-nested-cleanup2 : $(z2) : z2val ; } @@ -1359,6 +1499,7 @@ if $(NT) { local c = "echo value" ; +if $(OS) = VMS { c = "PIPE WRITE SYS$OUTPUT \"value\"" ; } check-equal shell : "value\n" : [ SHELL $(c) ] ; check-equal shell : "" : [ SHELL $(c) : no-output ] ; diff --git a/test/core_bindrule.py b/test/core_bindrule.py index 3a6916afa..6ae4ab34c 100755 --- a/test/core_bindrule.py +++ b/test/core_bindrule.py @@ -1,9 +1,9 @@ #!/usr/bin/python -# Copyright 2001 Dave Abrahams +# Copyright 2001 Dave Abrahams # Copyright 2011 Steven Watanabe -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import BoostBuild import os diff --git a/test/core_dependencies.py b/test/core_dependencies.py index 2b2ef368d..cf9873cb4 100644 --- a/test/core_dependencies.py +++ b/test/core_dependencies.py @@ -1,8 +1,8 @@ #!/usr/bin/python -# Copyright 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # This tests correct handling of dependencies, specifically, on generated # sources, and from generated sources. @@ -30,7 +30,7 @@ DEPENDS a : b ; actions create-b { - echo '#include ' > $(<) + echo '#include ' > $(<) } copy a : b ; create-b b ; @@ -84,7 +84,7 @@ t.run_build_system("-d+2 -f-", stdin=" DEPENDS all : foo.h ; " + code) t.fail_test(not correct_order(t.stdout())) # Now foo.h exists. Test include from b -> foo.h -> bar.h -> biz.h. b and foo.h -# already have updating actions. +# already have updating actions. 
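+# The chain exercised below is b -> foo.h -> bar.h -> biz.h: each header is
+# written so that it includes the next one, and header scanning turns those
+# textual includes into dependencies.  correct_order() (defined earlier in
+# this script) then inspects the -d+2 trace to verify the update actions run
+# in the expected order.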
t.rm(["a", "b"]) t.write("foo.h", "#include ") t.write("bar.h", "#include ") @@ -107,14 +107,14 @@ t.fail_test(not correct_order(t.stdout())) t.rm(["a", "biz.h"]) t.run_build_system("-d+2 -f-", stdin=" DEPENDS all : biz.h ; " + code) -t.fail_test(not correct_order(t.stdout())) +t.fail_test(not correct_order(t.stdout())) t.write("a", "") code=""" DEPENDS all : main d ; -actions copy +actions copy { cp $(>) $(<) ; } @@ -127,9 +127,9 @@ INCLUDES a : <1>c ; NOCARE <1>c ; SEARCH on <1>c = . ; -actions create-c +actions create-c { - echo d > $(<) + echo d > $(<) } actions create-d @@ -144,7 +144,7 @@ create-d d ; HDRSCAN on <1>c = (.*) ; HDRRULE on <1>c = hdrrule ; -rule hdrrule +rule hdrrule { INCLUDES $(1) : d ; } diff --git a/test/core_language.py b/test/core_language.py index 717e91ada..88a6d1934 100755 --- a/test/core_language.py +++ b/test/core_language.py @@ -1,8 +1,8 @@ #!/usr/bin/python -# Copyright 2002, 2003 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2002, 2003 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import BoostBuild diff --git a/test/custom_generator.py b/test/custom_generator.py index 9a1188a03..8c477a6f2 100644 --- a/test/custom_generator.py +++ b/test/custom_generator.py @@ -1,8 +1,8 @@ #!/usr/bin/python -# Copyright 2003, 2004, 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003, 2004, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # Attempt to declare a generator for creating OBJ from RC files. That generator # should be considered together with standard CPP->OBJ generators and @@ -14,11 +14,11 @@ import BoostBuild t = BoostBuild.Tester() -t.write("jamroot.jam", """ -import rcc ; +t.write("jamroot.jam", """ +import rcc ; """) -t.write("rcc.jam", """ +t.write("rcc.jam", """ import type ; import generators ; import print ; @@ -53,11 +53,11 @@ get_manager().engine().register_action( '@($(STDOUT):E=rc-object) > "$(<)"') """) -t.write("jamfile.jam", """ -obj r : r.rcc ; +t.write("jamfile.jam", """ +obj r : r.rcc ; """) -t.write("r.rcc", """ +t.write("r.rcc", """ """) t.run_build_system() diff --git a/test/default_build.py b/test/default_build.py index 6ad696ef0..f6c830210 100644 --- a/test/default_build.py +++ b/test/default_build.py @@ -19,7 +19,7 @@ t.run_build_system() t.expect_addition("bin/$toolset/debug/a.exe") t.expect_addition("bin/$toolset/release/a.exe") -# Check that explictly-specified build variant supresses default-build. +# Check that explictly-specified build variant suppresses default-build. 
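+# (For reference: the jamfile under test is assumed to declare its target with
+#  a default-build roughly like
+#      exe a : a.cpp : : <variant>debug <variant>release ;
+#  so an empty build request produces both variants, as checked above, while
+#  the explicit "release" request below must yield only the release binaries.)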
t.rm("bin") t.run_build_system(["release"]) t.expect_addition(BoostBuild.List("bin/$toolset/release/") * "a.exe a.obj") diff --git a/test/ordered_include.py b/test/ordered_include.py index f91f81fe8..72ab0d3d7 100644 --- a/test/ordered_include.py +++ b/test/ordered_include.py @@ -95,22 +95,22 @@ def test_basic(): #include int main() {} """) - + tester.write("a/test1.hpp", """ """) - + tester.write("b/test2.hpp", """ """) - + tester.run_build_system() - + tester.expect_addition("bin/$toolset/debug/test.obj") - + # Check that the dependencies are correct tester.touch("a/test1.hpp") tester.run_build_system() tester.expect_touch("bin/$toolset/debug/test.obj") - + tester.touch("b/test2.hpp") tester.run_build_system() tester.expect_touch("bin/$toolset/debug/test.obj") diff --git a/test/path_features.py b/test/path_features.py index 774c16196..224dd3c2e 100644 --- a/test/path_features.py +++ b/test/path_features.py @@ -106,7 +106,7 @@ import remote/remote ; # If we set the folder_to_include property directly, it will work obj x1 : x.cpp : @attach-include-local ; -obj x2 : x.cpp : @remote/remote.attach-include-remote ; +obj x2 : x.cpp : @remote.attach-include-remote ; rule attach-include-local ( properties * ) { diff --git a/test/print.py b/test/print.py index 65caf95e1..6579bce54 100644 --- a/test/print.py +++ b/test/print.py @@ -1,9 +1,9 @@ #!/usr/bin/python -# Copyright 2003 Douglas Gregor -# Copyright 2005 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2003 Douglas Gregor +# Copyright 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import BoostBuild diff --git a/test/project-test3/jamroot.jam b/test/project-test3/jamroot.jam index 8de43be51..3d4dfa19a 100644 --- a/test/project-test3/jamroot.jam +++ b/test/project-test3/jamroot.jam @@ -3,6 +3,7 @@ # (See accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) +import os ; import gcc ; import property ; @@ -42,5 +43,21 @@ actions yfc-link echo $(>) >> $(<) } +if [ os.name ] = VMS +{ + actions yfc-compile + { + PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W) + PIPE WRITE SYS$OUTPUT "$(>:J= ",")" | APPEND /NEW SYS$INPUT $(<:W) + } + + actions yfc-link + { + PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W) + OPEN /APPEND FOUT $(<:W) + WRITE FOUT "$(>:J= ",")" + CLOSE FOUT + } +} IMPORT $(__name__) : yfc-compile yfc-link : : yfc-compile yfc-link ; diff --git a/test/project-test3/lib3/jamfile.jam b/test/project-test3/lib3/jamfile.jam index 0d457817e..261062994 100644 --- a/test/project-test3/lib3/jamfile.jam +++ b/test/project-test3/lib3/jamfile.jam @@ -7,6 +7,7 @@ project lib3 ; use-project /lib2/helper : ../lib2/helper ; +import os ; import property ; rule properties-as-path ( properties * ) @@ -34,4 +35,13 @@ actions mfc-compile echo $(>) >> $(<) } +if [ os.name ] = VMS +{ + actions mfc-compile + { + PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W) + PIPE WRITE SYS$OUTPUT "$(>:J= ",")" | APPEND /NEW SYS$INPUT $(<:W) + } +} + make f.obj : f.cpp /lib2/helper//e.obj : mfc-compile ; diff --git a/test/project-test4/jamroot.jam b/test/project-test4/jamroot.jam index 801f0afb2..fbbe0abf1 100644 --- a/test/project-test4/jamroot.jam +++ b/test/project-test4/jamroot.jam @@ -1,8 +1,8 @@ -# Copyright 2002, 2003, 2005 Vladimir Prus -# 
Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) - +# Copyright 2002, 2003, 2005 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +import os ; import gcc ; import property ; @@ -23,7 +23,7 @@ rule properties-as-path ( properties * ) rule yfc-compile ( target : sources * : property-set * ) { - PROPERTIES on $(target) = [ properties-as-path $(property-set) ] ; + PROPERTIES on $(target) = [ properties-as-path $(property-set) ] ; } actions yfc-compile @@ -43,5 +43,21 @@ actions yfc-link echo $(>) >> $(<) } +if [ os.name ] = VMS +{ + actions yfc-compile + { + PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W) + PIPE WRITE SYS$OUTPUT "$(>:J= ",")" | APPEND /NEW SYS$INPUT $(<:W) + } + + actions yfc-link + { + PIPE WRITE SYS$OUTPUT "$(PROPERTIES)" | TYPE SYS$INPUT /OUT=$(<:W) + OPEN /APPEND FOUT $(<:W) + WRITE FOUT "$(>:J= ",")" + CLOSE FOUT + } +} IMPORT $(__name__) : yfc-compile yfc-link : : yfc-compile yfc-link ; diff --git a/test/qt4.py b/test/qt4.py index abb9594d5..170f6079b 100755 --- a/test/qt4.py +++ b/test/qt4.py @@ -1,8 +1,8 @@ #!/usr/bin/python # (c) Copyright Juergen Hunold 2008 -# Use, modification, and distribution are subject to the -# Boost Software License, Version 1.0. (See accompanying file +# Use, modification, and distribution are subject to the +# Boost Software License, Version 1.0. (See accompanying file # LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) import BoostBuild diff --git a/test/qt4/.gitignore b/test/qt4/.gitignore new file mode 100644 index 000000000..ba077a403 --- /dev/null +++ b/test/qt4/.gitignore @@ -0,0 +1 @@ +bin diff --git a/test/qt4/qtgui.cpp b/test/qt4/qtgui.cpp index 478e07a2a..75d9dacbe 100644 --- a/test/qt4/qtgui.cpp +++ b/test/qt4/qtgui.cpp @@ -27,7 +27,7 @@ struct Fixture QApplication application; }; -BOOST_GLOBAL_FIXTURE( Fixture ) +BOOST_GLOBAL_FIXTURE( Fixture ); BOOST_AUTO_TEST_CASE( defines) { diff --git a/test/qt4/qtxmlpatterns.cpp b/test/qt4/qtxmlpatterns.cpp index dcec92fd3..6835fdad8 100644 --- a/test/qt4/qtxmlpatterns.cpp +++ b/test/qt4/qtxmlpatterns.cpp @@ -33,7 +33,7 @@ struct Fixture QCoreApplication application; }; -BOOST_GLOBAL_FIXTURE( Fixture ) +BOOST_GLOBAL_FIXTURE( Fixture ); QByteArray doc("" "" diff --git a/test/qt5.py b/test/qt5.py index 75c4e670f..d9e1226e8 100755 --- a/test/qt5.py +++ b/test/qt5.py @@ -1,8 +1,8 @@ #!/usr/bin/python # (c) Copyright Juergen Hunold 2012 -# Use, modification, and distribution are subject to the -# Boost Software License, Version 1.0. (See accompanying file +# Use, modification, and distribution are subject to the +# Boost Software License, Version 1.0. 
(See accompanying file # LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) import BoostBuild diff --git a/test/qt5/.gitignore b/test/qt5/.gitignore new file mode 100644 index 000000000..ba077a403 --- /dev/null +++ b/test/qt5/.gitignore @@ -0,0 +1 @@ +bin diff --git a/test/qt5/jamroot.jam b/test/qt5/jamroot.jam index 90da392ed..7e50faf01 100644 --- a/test/qt5/jamroot.jam +++ b/test/qt5/jamroot.jam @@ -31,6 +31,8 @@ if [ qt5.initialized ] [ run qtscripttools.cpp /qt5//QtScriptTools ] [ run qtxmlpatterns.cpp /qt5//QtXmlPatterns ] + [ run qtpositioning.cpp /qt5//QtPositioning ] + # ToDo: runable example code [ link qtsvg.cpp /qt5//QtSvg ] [ link qtwidgets.cpp /qt5//QtWidgets ] @@ -44,7 +46,16 @@ if [ qt5.initialized ] [ link qtdeclarative.cpp /qt5//QtDeclarative ] # QtQuick version2 - [ run qtquick.cpp /qt5//QtQuick : -platform offscreen : $(CWD)/qtquick.qml ] + [ run qtquick.cpp /qt5//QtQuick : "--" -platform offscreen : $(CWD)/qtquick.qml ] + + [ run qtlocation.cpp /qt5//QtLocation ] + + [ run qtcharts.cpp /qt5//QtCharts ] + + [ run qt3dcore.cpp /qt5//Qt3DCore ] + [ run qt3drender.cpp /qt5//Qt3DRender ] + [ run qt3dinput.cpp /qt5//Qt3DInput ] + [ run qt3dlogic.cpp /qt5//Qt3DLogic ] # Help systems. [ link qthelp.cpp /qt5//QtHelp ] diff --git a/test/qt5/qt3dcore.cpp b/test/qt5/qt3dcore.cpp new file mode 100644 index 000000000..9d1871bdc --- /dev/null +++ b/test/qt5/qt3dcore.cpp @@ -0,0 +1,21 @@ +// (c) Copyright Juergen Hunold 2015 +// Use, modification and distribution is subject to the Boost Software +// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +#define BOOST_TEST_MODULE Qt3DCore +#include + +#include + +BOOST_AUTO_TEST_CASE (defines) +{ + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DCORE_LIB), true); +} + +BOOST_AUTO_TEST_CASE ( sample_code ) +{ + Qt3DCore::QTransform torusTransform; + torusTransform.setScale3D(QVector3D(1.5, 1, 0.5)); + torusTransform.setRotation(QQuaternion::fromAxisAndAngle(QVector3D(1, 0, 0), 45.0f)); +} diff --git a/test/qt5/qt3dinput.cpp b/test/qt5/qt3dinput.cpp new file mode 100644 index 000000000..46cee14a3 --- /dev/null +++ b/test/qt5/qt3dinput.cpp @@ -0,0 +1,24 @@ +// (c) Copyright Juergen Hunold 2015 +// Use, modification and distribution is subject to the Boost Software +// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +#define BOOST_TEST_MODULE Qt3DInput +#include + +#include + +BOOST_AUTO_TEST_CASE (defines) +{ + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DINPUT_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DCORE_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DRENDER_LIB), true); +} + + +BOOST_AUTO_TEST_CASE ( sample_code ) +{ + Qt3DCore::QEntity rootEntity; + +} + diff --git a/test/qt5/qt3dlogic.cpp b/test/qt5/qt3dlogic.cpp new file mode 100644 index 000000000..088f42099 --- /dev/null +++ b/test/qt5/qt3dlogic.cpp @@ -0,0 +1,20 @@ +// (c) Copyright Juergen Hunold 2015 +// Use, modification and distribution is subject to the Boost Software +// License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +#define BOOST_TEST_MODULE Qt3DLogic +#include + +#include + +BOOST_AUTO_TEST_CASE (defines) +{ + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DCORE_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DLOGIC_LIB), true); +} + +BOOST_AUTO_TEST_CASE ( sample_code ) +{ + Qt3DLogic::QLogicAspect logicAspect; +} diff --git a/test/qt5/qt3drender.cpp b/test/qt5/qt3drender.cpp new file mode 100644 index 000000000..d4578054d --- /dev/null +++ b/test/qt5/qt3drender.cpp @@ -0,0 +1,21 @@ +// (c) Copyright Juergen Hunold 2015 +// Use, modification and distribution is subject to the Boost Software +// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +#define BOOST_TEST_MODULE Qt3DRender +#include + +#include + +BOOST_AUTO_TEST_CASE (defines) +{ + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DCORE_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_3DRENDER_LIB), true); +} + +BOOST_AUTO_TEST_CASE ( sample_code ) +{ + Qt3DCore::QEntity rootEntity; + Qt3DRender::QMaterial material(&rootEntity); +} diff --git a/test/qt5/qtcharts.cpp b/test/qt5/qtcharts.cpp new file mode 100644 index 000000000..d29c4fd03 --- /dev/null +++ b/test/qt5/qtcharts.cpp @@ -0,0 +1,15 @@ +// (c) Copyright Juergen Hunold 2015 +// Use, modification and distribution is subject to the Boost Software +// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +#define BOOST_TEST_MODULE QtCharts +#include + +#include + +BOOST_AUTO_TEST_CASE (defines) +{ + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_WIDGETS_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CHARTS_LIB), true); +} diff --git a/test/qt5/qtlocation.cpp b/test/qt5/qtlocation.cpp new file mode 100644 index 000000000..9806dca93 --- /dev/null +++ b/test/qt5/qtlocation.cpp @@ -0,0 +1,30 @@ +// (c) Copyright Juergen Hunold 2012 +// Use, modification and distribution is subject to the Boost Software +// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +#define BOOST_TEST_MODULE QtPositioning + +#include +#include + +#include + +BOOST_AUTO_TEST_CASE (defines) +{ + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_POSITIONING_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_NETWORK_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_LOCATION_LIB), true); +} + +BOOST_TEST_DONT_PRINT_LOG_VALUE(QGeoAddress) + +BOOST_AUTO_TEST_CASE( geo_location ) +{ + QGeoLocation geolocation; + + QGeoAddress address; + + BOOST_CHECK_EQUAL(geolocation.address(), address); +} diff --git a/test/qt5/qtpositioning.cpp b/test/qt5/qtpositioning.cpp new file mode 100644 index 000000000..427b41ba9 --- /dev/null +++ b/test/qt5/qtpositioning.cpp @@ -0,0 +1,23 @@ +// (c) Copyright Juergen Hunold 2012 +// Use, modification and distribution is subject to the Boost Software +// License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +#define BOOST_TEST_MODULE QtPositioning + +#include + +#include + +BOOST_AUTO_TEST_CASE (defines) +{ + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_CORE_LIB), true); + BOOST_CHECK_EQUAL(BOOST_IS_DEFINED(QT_POSITIONING_LIB), true); +} + +BOOST_AUTO_TEST_CASE( geo_coordinate ) +{ + QGeoCoordinate geocoordinate; + + BOOST_CHECK_EQUAL(geocoordinate.type(), QGeoCoordinate::InvalidCoordinate); +} diff --git a/test/qt5/qtwidgets.cpp b/test/qt5/qtwidgets.cpp index 002003448..b868240a5 100644 --- a/test/qt5/qtwidgets.cpp +++ b/test/qt5/qtwidgets.cpp @@ -27,7 +27,7 @@ struct Fixture QApplication application; }; -BOOST_GLOBAL_FIXTURE( Fixture ) +BOOST_GLOBAL_FIXTURE( Fixture ); BOOST_AUTO_TEST_CASE( defines) { diff --git a/test/qt5/qtxmlpatterns.cpp b/test/qt5/qtxmlpatterns.cpp index 9a9b85430..d87e3d3fe 100644 --- a/test/qt5/qtxmlpatterns.cpp +++ b/test/qt5/qtxmlpatterns.cpp @@ -33,7 +33,7 @@ struct Fixture QCoreApplication application; }; -BOOST_GLOBAL_FIXTURE( Fixture ) +BOOST_GLOBAL_FIXTURE( Fixture ); QByteArray doc("" "" diff --git a/test/relative_sources.py b/test/relative_sources.py index bd4620fc6..f36e0b097 100644 --- a/test/relative_sources.py +++ b/test/relative_sources.py @@ -17,7 +17,7 @@ t.write("src/a.cpp", "int main() {}\n") t.run_build_system() t.expect_addition("bin/$toolset/debug/src/a.obj") - + # Test that the relative path to source is preserved # when using 'glob'. t.rm("bin") diff --git a/test/test1.py b/test/test1.py index 05b396648..79d142221 100644 --- a/test/test1.py +++ b/test/test1.py @@ -1,8 +1,8 @@ #!/usr/bin/python -# Copyright 2002 Vladimir Prus -# Distributed under the Boost Software License, Version 1.0. -# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) +# Copyright 2002 Vladimir Prus +# Distributed under the Boost Software License, Version 1.0. +# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import BoostBuild diff --git a/test/test_all.py b/test/test_all.py index 2d552d3a1..9edaa8951 100644 --- a/test/test_all.py +++ b/test/test_all.py @@ -124,6 +124,9 @@ def run_tests(critical_tests, other_tests): FAIL: %d """ % (pass_count, failures_count) + # exit with failure with failures + if failures_count > 0: + sys.exit(1) def last_failed_test(): "Returns the name of the last failed test or None." @@ -167,7 +170,9 @@ tests = ["absolute_sources", "builtin_echo", "builtin_exit", "builtin_glob", + "builtin_glob_archive", "builtin_split_by_characters", + "bzip2", "c_file", "chain", "clean", @@ -284,8 +289,10 @@ if os.name == "posix": # it fails ;-). Further, the test relies on the fact that on Linux, one can # build a shared library with unresolved symbols. This is not true on # Windows, even with cygwin gcc. - if "CYGWIN" not in os.uname()[0]: - tests.append("library_order") + +# Disable this test until we figure how to address failures due to --as-needed being default now. +# if "CYGWIN" not in os.uname()[0]: +# tests.append("library_order") if toolset.startswith("gcc"): tests.append("gcc_runtime") diff --git a/test/testing_support.py b/test/testing_support.py index 01a7c4826..ad25b4aad 100755 --- a/test/testing_support.py +++ b/test/testing_support.py @@ -26,7 +26,7 @@ def test_files_with_spaces_in_their_name(): t.write("valid source.cpp", "int main() {}\n"); t.write("invalid source.cpp", "this is not valid source code"); - + t.write("jamroot.jam", """ import testing ; testing.compile "valid source.cpp" ;
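# testing.compile declares a compile-only test target; quoting the file name
# keeps the embedded space as part of a single source name.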