Mirror of https://github.com/boostorg/build.git (synced 2026-02-17 13:42:14 +00:00)
Squashed commit of the following:
commit 61f7def696caaddcf7213c1ff5c6dcfeb8faa3be
Author: Rene Rivera <grafikrobot@gmail.com>
Date: Tue Oct 17 08:36:10 2017 -0500

    Undo doc print to qbk.

commit 095f7807c42c3056862eb1db110b16f5739dcc10
Author: Rene Rivera <grafikrobot@gmail.com>
Date: Tue Oct 17 08:09:54 2017 -0500

    Fix test that collides with manpage type.

commit d3eca8827d08c683d07a3224bd9670e5a4b9a300
Author: Rene Rivera <grafikrobot@gmail.com>
Date: Mon Oct 16 22:39:44 2017 -0500

    Merge branch 'feature/asciidoctor' of https://github.com/boostorg/build.git into feature/asciidoctor

commit 65c780725357b706e942c173a0af0a6837fc4c88
Author: Rene Rivera <grafikrobot@gmail.com>
Date: Mon Aug 07 21:41:24 2017 -0500

    Initial support for asciidoctor tool.

commit 0a5a12c16ae87f5694be91cd72439840710adce0
Author: Rene Rivera <grafikrobot@gmail.com>
Date: Mon Aug 07 21:41:24 2017 -0500

    Initial support for asciidoctor tool.

    Merge remote-tracking branch 'origin/develop' into feature/new-doc-format

    Conflicts:
        doc/src/overview.xml
        doc/src/reference.xml
.travis.yml  32
@@ -1,7 +1,9 @@
sudo: false
os:
- linux
- osx
branches:
only:
- master
- develop
- /feature\/.*/
env:
matrix:
- TRAVIS_EMPTY_JOB_WORKAROUND=true
@@ -9,15 +11,39 @@ matrix:
exclude:
- env: TRAVIS_EMPTY_JOB_WORKAROUND=true
include:
- os: linux
dist: precise
env: TOOLSET=gcc
- os: linux
dist: precise
env: TOOLSET=clang
- os: linux
dist: trusty
env: TOOLSET=gcc
- os: linux
dist: trusty
env: TOOLSET=clang
- os: osx
osx_image: xcode9.2
env: TOOLSET=clang
- os: osx
osx_image: xcode9.1
env: TOOLSET=clang
- os: osx
osx_image: xcode9
env: TOOLSET=clang
- os: osx
osx_image: xcode8.3
env: TOOLSET=clang
- os: osx
osx_image: xcode8
env: TOOLSET=clang
- os: osx
osx_image: xcode7.3
env: TOOLSET=clang
- os: osx
osx_image: xcode6.4
env: TOOLSET=clang
language: cpp
script:
- cd src/engine
@@ -4,6 +4,11 @@
#
# Copyright Rene Rivera 2015-2017.

branches:
only:
- master
- develop
- /feature\/.*/

image:
- Visual Studio 2017
@@ -101,16 +101,21 @@ rule set-default-toolset ( toolset : version ? )
.default-toolset-version = $(version) ;
}

rule set-pre-build-hook ( function )
rule add-pre-build-hook ( function )
{
.pre-build-hook = $(function) ;
.pre-build-hook += $(function) ;
}

rule set-post-build-hook ( function )
rule add-post-build-hook ( function )
{
.post-build-hook = $(function) ;
.post-build-hook += $(function) ;
}

# Old names for backwards compatibility
IMPORT build-system : add-pre-build-hook : build-system : set-pre-build-hook ;
IMPORT build-system : add-post-build-hook : build-system : set-post-build-hook ;
EXPORT build-system : set-pre-build-hook set-post-build-hook ;
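For orientation, a hedged sketch of how a build script might register hooks through the new rules; the rule names are hypothetical, and the global IMPORT is shown only as one assumed way to make the hooks callable by name from the build-system module:

import build-system ;

# Hypothetical hook rules.
rule notify-start ( )
{
    ECHO "pre-build hook fired" ;
}
rule notify-done ( status ? )
{
    ECHO "post-build hook fired:" $(status) ;
}

# Make the hooks visible by plain name, then register them.
IMPORT $(__name__) : notify-start notify-done : : notify-start notify-done ;
build-system.add-pre-build-hook notify-start ;
build-system.add-post-build-hook notify-done ;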
################################################################################
#
# Local rules.
@@ -267,6 +272,35 @@ local rule load-config ( module-name : filename : path + : must-find ? )
return $(where) ;
}

# Parses options of the form --xxx-config=path/to/config.jam
# and environment variables of the form BOOST_BUILD_XXX_CONFIG.
# If not found, returns an empty list. The option may be
# explicitly set to the empty string, in which case handle-config-option
# will return "".
#
local rule handle-config-option ( name : env ? )
{
local result = [ MATCH ^--$(name)=(.*)$ : $(.argv) ] ;
if ! $(result)-is-defined && $(env)
{
result = [ os.environ $(env) ] ;
}
# Special handling for the case when the OS does not strip the quotes
# around the file name, as is the case when using Cygwin bash.
result = [ utility.unquote $(result[-1]) ] ;
if ! $(result)
{
return $(result) ;
}
# Treat explicitly entered user paths as native OS path
# references and, if non-absolute, root them at the current
# working directory.
result = [ path.make $(result) ] ;
result = [ path.root $(result) [ path.pwd ] ] ;
result = [ path.native $(result) ] ;
return $(result) ;
}
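To make the accepted forms concrete, a hedged example with the b2 driver (the file name is illustrative): the user configuration can be selected either on the command line or through the corresponding environment variable, and both spellings are resolved by this rule:

b2 --user-config=path/to/my-config.jam
BOOST_BUILD_USER_CONFIG=path/to/my-config.jam b2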
# Loads all the configuration files used by Boost Build in the following order:
#
@@ -277,11 +311,21 @@ local rule load-config ( module-name : filename : path + : must-find ? )
# files will not be. If a relative path is specified, file is searched for in
# the current folder.
#
# -- all-config --
# Loaded only if specified on the command-line using the --config command
# line option. If a file name is specified, it must exist and replaces all
# other configuration files. If an empty file name is passed, no configuration
# files will be loaded.
#
# -- site-config --
# Always named site-config.jam. Will only be found if located on the system
# root path (Windows), /etc (non-Windows), user's home folder or the Boost Build
# path, in that order. Not loaded in case the test-config configuration file is
# loaded or the --ignore-site-config command-line option is specified.
# Named site-config.jam by default or may be named explicitly using the
# --site-config command-line option. If named explicitly, the file is found
# relative to the current working directory and must exist. If the default one
# is used then it is searched for in the system root path (Windows),
# /etc (non-Windows), user's home folder or the Boost Build path, in that
# order. Not loaded in case the test-config configuration file is loaded,
# the file is explicitly set to the empty string or the --ignore-site-config
# command-line option is specified.
#
# -- user-config --
# Named user-config.jam by default or may be named explicitly using the
@@ -294,8 +338,12 @@ local rule load-config ( module-name : filename : path + : must-find ? )
# file must exist.
#
# -- project-config --
# Always named project-config.jam. Looked up in the current working folder and
# then upwards through its parents up to the root folder.
# Named project-config.jam. Looked up in the current working folder and
# then upwards through its parents up to the root folder. It may also be
# named explicitly using the --project-config command-line option. If a file
# is specified explicitly, it is found relative to the current working
# directory and must exist. If an empty file name is passed, project-config
# will not be loaded.
#
# Test configurations have been added primarily for use by Boost Build's
# internal unit testing system but may be used freely in other places as well.
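As an aside, a hedged illustration of how the options described above combine on a b2 command line (file names are examples; as noted above, passing an empty name disables the corresponding file):

b2 --config=everything.jam                  # load only this file
b2 --project-config=../project-config.jam   # explicit project configuration
b2 --site-config= --user-config=            # disable site and user configuration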
|
||||
@@ -305,14 +353,11 @@ local rule load-configuration-files
|
||||
# Flag indicating that site configuration should not be loaded.
|
||||
local ignore-site-config =
|
||||
[ MATCH ^(--ignore-site-config)$ : $(.argv) ] ;
|
||||
local ignore-user-config ;
|
||||
local ignore-project-config ;
|
||||
|
||||
initialize-config-module test-config ;
|
||||
local test-config = [ MATCH ^--test-config=(.*)$ : $(.argv) ] ;
|
||||
local uq = [ MATCH \"(.*)\" : $(test-config) ] ;
|
||||
if $(uq)
|
||||
{
|
||||
test-config = $(uq) ;
|
||||
}
|
||||
local test-config = [ handle-config-option test-config ] ;
|
||||
if $(test-config)
|
||||
{
|
||||
local where = [ load-config test-config : $(test-config:BS) :
|
||||
@@ -325,12 +370,32 @@ local rule load-configuration-files
|
||||
ECHO "notice: be ignored due to the test configuration being"
|
||||
"loaded." ;
|
||||
}
|
||||
ignore-site-config = true ;
|
||||
ignore-user-config = true ;
|
||||
}
|
||||
else
|
||||
}
|
||||
|
||||
initialize-config-module all-config ;
|
||||
local all-config = [ handle-config-option config ] ;
|
||||
if $(all-config)
|
||||
{
|
||||
load-config all-config : $(all-config:D=) : $(all-config:D) : required ;
|
||||
if $(.debug-config)
|
||||
{
|
||||
test-config = ;
|
||||
ECHO "notice: Regular configuration files will be ignored due" ;
|
||||
ECHO "notice: to the global configuration being loaded." ;
|
||||
}
|
||||
}
|
||||
if $(all-config)-is-defined
|
||||
{
|
||||
if $(.debug-config) && ! $(all-config)
|
||||
{
|
||||
ECHO "notice: Configuration file loading explicitly disabled." ;
|
||||
}
|
||||
ignore-site-config = true ;
|
||||
ignore-user-config = true ;
|
||||
ignore-project-config = true ;
|
||||
}
|
||||
|
||||
local user-path = [ os.home-directories ] [ os.environ BOOST_BUILD_PATH ] ;
|
||||
local site-path = /etc $(user-path) ;
|
||||
@@ -339,55 +404,52 @@ local rule load-configuration-files
|
||||
site-path = [ modules.peek : SystemRoot ] $(user-path) ;
|
||||
}
|
||||
|
||||
if $(.debug-config) && ! $(test-config) && $(ignore-site-config)
|
||||
if $(.debug-config) && $(ignore-site-config) = --ignore-site-config
|
||||
{
|
||||
ECHO "notice: Site configuration files will be ignored due to the" ;
|
||||
ECHO "notice: --ignore-site-config command-line option." ;
|
||||
}
|
||||
|
||||
initialize-config-module site-config ;
|
||||
if ! $(test-config) && ! $(ignore-site-config)
|
||||
if ! $(ignore-site-config)
|
||||
{
|
||||
load-config site-config : site-config.jam : $(site-path) ;
|
||||
local site-config = [ handle-config-option site-config ] ;
|
||||
if $(site-config)
|
||||
{
|
||||
load-config site-config : $(site-config:D=) : $(site-config:D)
|
||||
: must-exist ;
|
||||
}
|
||||
else if ! $(site-config)-is-defined
|
||||
{
|
||||
load-config site-config : site-config.jam : $(site-path) ;
|
||||
}
|
||||
else if $(.debug-config)
|
||||
{
|
||||
ECHO notice: Site configuration file loading explicitly disabled. ;
|
||||
}
|
||||
}
|
||||
|
||||
initialize-config-module user-config ;
|
||||
if ! $(test-config)
|
||||
if ! $(ignore-user-config)
|
||||
{
|
||||
local user-config = [ MATCH ^--user-config=(.*)$ : $(.argv) ] ;
|
||||
user-config = $(user-config[-1]) ;
|
||||
user-config ?= [ os.environ BOOST_BUILD_USER_CONFIG ] ;
|
||||
# Special handling for the case when the OS does not strip the quotes
|
||||
# around the file name, as is the case when using Cygwin bash.
|
||||
user-config = [ utility.unquote $(user-config) ] ;
|
||||
local explicitly-requested = $(user-config) ;
|
||||
user-config ?= user-config.jam ;
|
||||
local user-config =
|
||||
[ handle-config-option user-config : BOOST_BUILD_USER_CONFIG ] ;
|
||||
|
||||
if $(user-config)
|
||||
{
|
||||
if $(explicitly-requested)
|
||||
if $(.debug-config)
|
||||
{
|
||||
# Treat explicitly entered user paths as native OS path
|
||||
# references and, if non-absolute, root them at the current
|
||||
# working directory.
|
||||
user-config = [ path.make $(user-config) ] ;
|
||||
user-config = [ path.root $(user-config) [ path.pwd ] ] ;
|
||||
user-config = [ path.native $(user-config) ] ;
|
||||
|
||||
if $(.debug-config)
|
||||
{
|
||||
ECHO notice: Loading explicitly specified user configuration
|
||||
file: ;
|
||||
ECHO " $(user-config)" ;
|
||||
}
|
||||
|
||||
load-config user-config : $(user-config:BS) : $(user-config:D)
|
||||
: must-exist ;
|
||||
}
|
||||
else
|
||||
{
|
||||
load-config user-config : $(user-config) : $(user-path) ;
|
||||
ECHO notice: Loading explicitly specified user configuration
|
||||
file: ;
|
||||
ECHO " $(user-config)" ;
|
||||
}
|
||||
|
||||
load-config user-config : $(user-config:D=) : $(user-config:D)
|
||||
: must-exist ;
|
||||
}
|
||||
else if ! $(user-config)-is-defined
|
||||
{
|
||||
load-config user-config : user-config.jam : $(user-path) ;
|
||||
}
|
||||
else if $(.debug-config)
|
||||
{
|
||||
@@ -401,15 +463,33 @@ local rule load-configuration-files
|
||||
# - We need to load project-config.jam before Jamroot
|
||||
# - We probably need to load project-config.jam even if there is no Jamroot
|
||||
# - e.g. to implement automake-style out-of-tree builds.
|
||||
local file = [ path.glob "." : project-config.jam ] ;
|
||||
if ! $(file)
|
||||
if ! $(ignore-project-config)
|
||||
{
|
||||
file = [ path.glob-in-parents "." : project-config.jam ] ;
|
||||
}
|
||||
if $(file)
|
||||
{
|
||||
initialize-config-module project-config : $(file:D) ;
|
||||
load-config project-config : project-config.jam : $(file:D) ;
|
||||
local project-config = [ handle-config-option project-config ] ;
|
||||
if $(project-config)
|
||||
{
|
||||
initialize-config-module project-config : $(project-config:D=) ;
|
||||
load-config project-config : $(project-config:D=)
|
||||
: $(project-config:D) : must-exist ;
|
||||
}
|
||||
else if ! $(project-config)-is-defined
|
||||
{
|
||||
local file = [ path.glob "." : project-config.jam ] ;
|
||||
if ! $(file)
|
||||
{
|
||||
file = [ path.glob-in-parents "." : project-config.jam ] ;
|
||||
}
|
||||
if $(file)
|
||||
{
|
||||
initialize-config-module project-config : $(file:D) ;
|
||||
load-config project-config : project-config.jam : $(file:D) ;
|
||||
}
|
||||
}
|
||||
else if $(.debug-config)
|
||||
{
|
||||
ECHO notice: Project configuration file loading explicitly
|
||||
disabled. ;
|
||||
}
|
||||
}
|
||||
|
||||
project.end-load ;
|
||||
@@ -458,7 +538,9 @@ local rule process-explicit-toolset-requests
|
||||
"previously configured; attempting to auto-configure now" ;
|
||||
}
|
||||
local t,v = [ MATCH ([^-]+)-?(.+)? : $(toolset) ] ;
|
||||
project.push-current ;
|
||||
toolset.using $(t,v[1]) : $(t,v[2]) ;
|
||||
project.pop-current ;
|
||||
}
|
||||
|
||||
# Make sure we get an appropriate property into the build request in
|
||||
@@ -963,9 +1045,9 @@ local rule should-clean-project ( project )
|
||||
{
|
||||
configure.print-configure-checks-summary ;
|
||||
|
||||
if $(.pre-build-hook)
|
||||
for local function in $(.pre-build-hook)
|
||||
{
|
||||
$(.pre-build-hook) ;
|
||||
$(function) ;
|
||||
}
|
||||
|
||||
DEPENDS all : $(actual-targets) ;
|
||||
@@ -980,9 +1062,9 @@ local rule should-clean-project ( project )
|
||||
{
|
||||
UPDATE_NOW $(.out-xml) : : ignore-minus-n ;
|
||||
}
|
||||
if $(.post-build-hook)
|
||||
for local function in $(.post-build-hook)
|
||||
{
|
||||
$(.post-build-hook) $(ok) ;
|
||||
$(function) $(ok) ;
|
||||
}
|
||||
# Prevent automatic update of the 'all' target, now that we have
|
||||
# explicitly updated what we wanted.
|
||||
|
||||
@@ -11,6 +11,7 @@ import modules ;
|
||||
import "class" ;
|
||||
import errors ;
|
||||
import configure ;
|
||||
import feature ;
|
||||
import project ;
|
||||
import virtual-target ;
|
||||
import generators ;
|
||||
@@ -22,11 +23,21 @@ project.initialize $(__name__) ;
|
||||
.project = [ project.current ] ;
|
||||
project ac ;
|
||||
|
||||
feature.feature ac.print-text : : free ;
|
||||
|
||||
rule generate-include ( target : sources * : properties * )
|
||||
{
|
||||
local header = [ property.select <include> : $(properties) ] ;
|
||||
print.output $(target) ;
|
||||
print.text "#include <$(header:G=)>\n" : true ;
|
||||
local text = [ property.select <ac.print-text> : $(properties) ] ;
|
||||
if $(text)
|
||||
{
|
||||
print.text $(text:G=) : true ;
|
||||
}
|
||||
else
|
||||
{
|
||||
local header = [ property.select <include> : $(properties) ] ;
|
||||
print.text "#include <$(header:G=)>\n" : true ;
|
||||
}
|
||||
}
|
||||
|
||||
rule generate-main ( target : sources * : properties * )
|
||||
@@ -35,7 +46,7 @@ rule generate-main ( target : sources * : properties * )
|
||||
print.text "int main() {}" : true ;
|
||||
}
|
||||
|
||||
rule find-include-path ( properties : header : provided-path ? )
|
||||
rule find-include-path ( properties : header : provided-path ? : test-source ? )
|
||||
{
|
||||
if $(provided-path) && [ path.exists [ path.root $(header) $(provided-path) ] ]
|
||||
{
|
||||
@@ -43,21 +54,23 @@ rule find-include-path ( properties : header : provided-path ? )
|
||||
}
|
||||
else
|
||||
{
|
||||
local a = [ class.new action : ac.generate-include : [ property-set.create <include>$(header) ] ] ;
|
||||
local a = [ class.new action : ac.generate-include : [ property-set.create <include>$(header) <ac.print-text>$(test-source) ] ] ;
|
||||
# Create a new CPP target named after the header.
|
||||
# Replace dots (".") in target basename for portability.
|
||||
local basename = [ regex.replace $(header:D=) "[.]" "_" ] ;
|
||||
local header-target = $(header:S=:B=$(basename)) ;
|
||||
local cpp = [ class.new file-target $(header-target:S=.cpp) exact : CPP : $(.project) : $(a) ] ;
|
||||
cpp = [ virtual-target.register $(cpp) ] ;
|
||||
$(cpp).root true ;
|
||||
local result = [ generators.construct $(.project) $(header-target) : OBJ : $(properties) : $(cpp) : true ] ;
|
||||
configure.maybe-force-rebuild $(result[2-]) ;
|
||||
local jam-targets ;
|
||||
for t in $(result[2-])
|
||||
for local t in $(result[2-])
|
||||
{
|
||||
jam-targets += [ $(t).actualize ] ;
|
||||
}
|
||||
if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ]
|
||||
: ignore-minus-n : ignore-minus-q ]
|
||||
: ignore-minus-n ]
|
||||
{
|
||||
return %default ;
|
||||
}
|
||||
@@ -66,7 +79,6 @@ rule find-include-path ( properties : header : provided-path ? )
|
||||
|
||||
rule construct-library ( name : property-set : provided-path ? )
|
||||
{
|
||||
property-set = [ $(property-set).refine [ property-set.create $(link-opt) ] ] ;
|
||||
local lib-props = [ $(property-set).add-raw <name>$(name) <search>$(provided-path) ] ;
|
||||
return [ generators.construct $(.project) lib-$(name)
|
||||
: SEARCHED_LIB : $(lib-props) : : true ] ;
|
||||
@@ -96,16 +108,18 @@ rule find-library ( properties : names + : provided-path ? )
|
||||
[ property-set.empty ] ] ;
|
||||
local main.cpp = [ virtual-target.register
|
||||
[ class.new file-target main-$(name).cpp exact : CPP : $(.project) : $(a) ] ] ;
|
||||
$(main.cpp).root true ;
|
||||
local test = [ generators.construct $(.project) $(name) : EXE
|
||||
: [ $(properties).add $(lib[1]) ] : $(main.cpp) $(lib[2-])
|
||||
: true ] ;
|
||||
configure.maybe-force-rebuild $(test[2-]) ;
|
||||
local jam-targets ;
|
||||
for t in $(test[2-])
|
||||
{
|
||||
jam-targets += [ $(t).actualize ] ;
|
||||
}
|
||||
if [ UPDATE_NOW $(jam-targets) : [ modules.peek configure : .log-fd ]
|
||||
: ignore-minus-n : ignore-minus-q ]
|
||||
: ignore-minus-n ]
|
||||
{
|
||||
result = $(name) $(link-opts[1]) ;
|
||||
names-iter = ; link-opts = ; # break
|
||||
@@ -143,6 +157,11 @@ class ac-library : basic-target
|
||||
self.default-names = $(names) ;
|
||||
}
|
||||
|
||||
rule set-header-test ( source )
|
||||
{
|
||||
self.header-test = $(source) ;
|
||||
}
|
||||
|
||||
rule reconfigure ( include-path ? : library-path ? : library-name ? )
|
||||
{
|
||||
if $(include-path) || $(library-path) || $(library-name)
|
||||
@@ -205,10 +224,7 @@ class ac-library : basic-target
|
||||
library-path ?= [ modules.peek : $(name:U)_LIBPATH ] ;
|
||||
}
|
||||
|
||||
local toolset = [ $(property-set).get <toolset> ] ;
|
||||
local toolset-version-property = "<toolset-$(toolset):version>" ;
|
||||
local relevant = [ property.select <target-os> <toolset>
|
||||
$(toolset-version-property) <link> <address-model> <architecture> :
|
||||
local relevant = [ property.select [ configure.get-relevant-features ] <link> :
|
||||
[ $(property-set).raw ] ] ;
|
||||
|
||||
local key = ac-library-$(name)-$(relevant:J=-) ;
|
||||
@@ -236,7 +252,7 @@ class ac-library : basic-target
|
||||
}
|
||||
else
|
||||
{
|
||||
local includes = [ ac.find-include-path $(property-set) : $(self.header) : $(include-path) ] ;
|
||||
local includes = [ ac.find-include-path $(property-set) : $(self.header) : $(include-path) : $(self.header-test) ] ;
|
||||
local library = [ ac.find-library $(property-set) : $(libnames) : $(library-path) ] ;
|
||||
if $(includes) && $(library)
|
||||
{
|
||||
@@ -300,5 +316,8 @@ rule check-library ( target : true-properties * : false-properties * )
|
||||
{
|
||||
local instance = [ class.new check-library-worker $(target) :
|
||||
$(true-properties) : $(false-properties) ] ;
|
||||
return <conditional>@$(instance).check ;
|
||||
return <conditional>@$(instance).check
|
||||
[ property.evaluate-conditional-relevance
|
||||
$(true-properties) $(false-properties)
|
||||
: [ configure.get-relevant-features ] <link> ] ;
|
||||
}
|
||||
|
||||
@@ -38,6 +38,23 @@ rule expand-no-defaults ( property-sets * )
|
||||
}
|
||||
|
||||
|
||||
# Update the list of expected conflicts based on the new
|
||||
# features.
|
||||
#
|
||||
local rule remove-conflicts ( conflicts * : features * )
|
||||
{
|
||||
local result ;
|
||||
for local c in $(conflicts)
|
||||
{
|
||||
if ! [ set.intersection [ regex.split $(c) "/" ] : $(features) ]
|
||||
{
|
||||
result += $(c) ;
|
||||
}
|
||||
}
|
||||
return $(result) ;
|
||||
}
|
||||
|
||||
|
||||
# Implementation of x-product, below. Expects all the project files to already
|
||||
# be loaded.
|
||||
#
|
||||
@@ -47,36 +64,54 @@ local rule x-product-aux ( property-sets + )
|
||||
local p = [ feature.split $(property-sets[1]) ] ;
|
||||
local f = [ set.difference $(p:G) : [ feature.free-features ] ] ;
|
||||
local seen ;
|
||||
local extra-conflicts ;
|
||||
|
||||
# No conflict with things used at a higher level?
|
||||
if ! [ set.intersection $(f) : $(x-product-used) ]
|
||||
{
|
||||
local x-product-seen ;
|
||||
local x-product-conflicts =
|
||||
[ remove-conflicts $(x-product-conflicts) : $(f) ] ;
|
||||
{
|
||||
# Do not mix in any conflicting features.
|
||||
local x-product-used = $(x-product-used) $(f) ;
|
||||
|
||||
if $(property-sets[2])
|
||||
{
|
||||
local rest = [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
|
||||
local rest = [ x-product-aux $(property-sets[2-]) ] ;
|
||||
result = $(property-sets[1])/$(rest) ;
|
||||
}
|
||||
|
||||
result ?= $(property-sets[1]) ;
|
||||
if ! $(x-product-conflicts)
|
||||
{
|
||||
result ?= $(property-sets[1]) ;
|
||||
}
|
||||
}
|
||||
|
||||
# If we did not encounter a conflicting feature lower down, do not
|
||||
# recurse again.
|
||||
if ! [ set.intersection $(f) : $(x-product-seen) ]
|
||||
if ! [ set.intersection $(f) : $(x-product-seen) ]
|
||||
|| [ remove-conflicts $(x-product-conflicts) : $(x-product-seen) ]
|
||||
{
|
||||
property-sets = ;
|
||||
}
|
||||
else
|
||||
{
|
||||
# A property is only allowed to be absent if it conflicts
|
||||
# with either a higher or lower layer. We don't need to
|
||||
# bother setting this if we already know that we don't need
|
||||
# to recurse again.
|
||||
extra-conflicts = $(f:J=/) ;
|
||||
}
|
||||
|
||||
seen = $(x-product-seen) ;
|
||||
}
|
||||
|
||||
if $(property-sets[2])
|
||||
{
|
||||
result += [ x-product-aux $(property-sets[2-]) : $(feature-space) ] ;
|
||||
# Lower layers expansion must conflict with this
|
||||
local x-product-conflicts = $(x-product-conflicts) $(extra-conflicts) ;
|
||||
|
||||
result += [ x-product-aux $(property-sets[2-]) ] ;
|
||||
}
|
||||
|
||||
# Note that we have seen these features so that higher levels will recurse
|
||||
@@ -90,13 +125,21 @@ local rule x-product-aux ( property-sets + )
# contain conflicting values for single-valued features. Expects all the project
# files to already be loaded.
#
# Formal definition:
# Returns all maximum non-conflicting subsets of property-sets.
# The result is a list of all property-sets p such that
# 1. p is composed by joining a subset of property-sets without removing
# duplicates
# 2. p contains at most one instance of every single-valued feature
# 3. Adding any additional element of property-sets to p would
# violate (2)
local rule x-product ( property-sets * )
{
if $(property-sets).non-empty
{
# Prepare some "scoped globals" that can be used by the implementation
# function, x-product-aux.
local x-product-seen x-product-used ;
local x-product-seen x-product-used x-product-conflicts ;
return [ x-product-aux $(property-sets) : $(feature-space) ] ;
}
# Otherwise return empty.
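A small worked example of that definition, with illustrative feature values: for the input property sets <threading>single, <threading>multi and <variant>debug, the two <threading> values conflict with each other, so the maximal non-conflicting joins are <threading>single/<variant>debug and <threading>multi/<variant>debug, and by the definition above that pair of joined property sets is what x-product yields.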
|
||||
|
||||
@@ -67,6 +67,9 @@ rule load ( cache-file )
|
||||
cache-file = [ path.native $(cache-file) ] ;
|
||||
if [ path.exists $(cache-file) ] && ! ( --reconfigure in [ modules.peek : ARGV ] )
|
||||
{
|
||||
FILE_CONTENTS on <old-cache-file>$(cache-file) = "" ;
|
||||
config-cache.write <old-cache-file>$(cache-file) ;
|
||||
UPDATE_NOW <old-cache-file>$(cache-file) : [ modules.peek configure : .log-fd ] ;
|
||||
include <old-cache-file>$(cache-file) ;
|
||||
}
|
||||
.cache-file = $(cache-file) ;
|
||||
|
||||
@@ -20,7 +20,9 @@ import targets ;
|
||||
import config-cache ;
|
||||
import feature ;
|
||||
import modules ;
|
||||
import sequence ;
|
||||
import utility ;
|
||||
import virtual-target ;
|
||||
|
||||
|
||||
# The configure feature allows external definition of what features are
|
||||
@@ -144,6 +146,28 @@ rule print-configure-checks-summary ( )
|
||||
}
|
||||
}
|
||||
|
||||
if --reconfigure in [ modules.peek : ARGV ]
|
||||
{
|
||||
.reconfigure = true ;
|
||||
}
|
||||
|
||||
# Handle the --reconfigure option
|
||||
rule maybe-force-rebuild ( targets * )
|
||||
{
|
||||
if $(.reconfigure)
|
||||
{
|
||||
local all-targets ;
|
||||
for local t in $(targets)
|
||||
{
|
||||
all-targets += [ virtual-target.traverse $(t) ] ;
|
||||
}
|
||||
for local t in [ sequence.unique $(all-targets) ]
|
||||
{
|
||||
$(t).always ;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Attempts to build a set of virtual targets
|
||||
rule try-build ( targets * : ps : what : retry ? )
|
||||
{
|
||||
@@ -154,6 +178,8 @@ rule try-build ( targets * : ps : what : retry ? )
|
||||
local result ;
|
||||
local jam-targets ;
|
||||
|
||||
maybe-force-rebuild $(targets) ;
|
||||
|
||||
for local t in $(targets)
|
||||
{
|
||||
jam-targets += [ $(t).actualize ] ;
|
||||
@@ -206,6 +232,76 @@ rule try-build ( targets * : ps : what : retry ? )
|
||||
return $(result) ;
|
||||
}
|
||||
|
||||
# Attempts to build several sets of virtual targets. Returns the
# index of the first set that builds.
|
||||
rule try-find-build ( ps : what : * )
|
||||
{
|
||||
local args = 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
|
||||
# The outer layer only needs to check $(what), but we
|
||||
# also need to check the individual elements, in case
|
||||
# the set of targets has changed since the last build.
|
||||
local cache-name = $(what) $($(args)[1]) [ $(ps).raw ] ;
|
||||
cache-name = $(cache-name:J=-) ;
|
||||
local value = [ config-cache.get $(cache-name) ] ;
|
||||
|
||||
local result ;
|
||||
local jam-targets ;
|
||||
|
||||
maybe-force-rebuild $($(args)[2-]) ;
|
||||
|
||||
if $(value)
|
||||
{
|
||||
local none = none ; # What to show when the argument
|
||||
local name = $(value) ;
|
||||
if $(name) != none
|
||||
{
|
||||
name = [ CALC $(name) + 2 ] ;
|
||||
}
|
||||
local x = [ PAD " - $(what)" : $(.width) ] ;
|
||||
local y = [ PAD $($(name)[1]) : 3 ] ;
|
||||
result = $(value) ;
|
||||
log-check-result "$(x) : $(y) (cached)" ;
|
||||
}
|
||||
else
|
||||
{
|
||||
local x = [ PAD " - $(what)" : $(.width) ] ;
|
||||
for local i in $(args)
|
||||
{
|
||||
local jam-targets ;
|
||||
for local t in $($(i)[2-])
|
||||
{
|
||||
jam-targets += [ $(t).actualize ] ;
|
||||
}
|
||||
if [ UPDATE_NOW $(jam-targets) :
|
||||
$(.log-fd) : ignore-minus-n : ignore-minus-q ]
|
||||
{
|
||||
result = [ CALC $(i) - 2 ] ;
|
||||
log-check-result "$(x) : $($(i)[1])" ;
|
||||
break ;
|
||||
}
|
||||
}
|
||||
if ! $(result)
|
||||
{
|
||||
result = none ;
|
||||
}
|
||||
}
|
||||
if ! $(value)
|
||||
{
|
||||
if $(result)
|
||||
{
|
||||
config-cache.set $(cache-name) : $(result) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
config-cache.set $(cache-name) : $(result) ;
|
||||
}
|
||||
}
|
||||
if $(result) != none
|
||||
{
|
||||
return $(result) ;
|
||||
}
|
||||
}
|
||||
|
||||
# Attempt to build a metatarget named by 'metatarget-reference'
|
||||
# in context of 'project' with properties 'ps'.
|
||||
# Returns non-empty value if build is OK.
|
||||
@@ -232,7 +328,69 @@ rule builds-raw ( metatarget-reference : project : ps : what : retry ? )
|
||||
}
|
||||
}
|
||||
|
||||
local rule get-relevant-features ( )
|
||||
# Attempt to build a metatarget named by 'metatarget-reference'
|
||||
# in context of 'project' with properties 'ps'.
|
||||
# Returns the 1-based index of the first target
|
||||
# that builds.
|
||||
rule find-builds-raw ( project : ps : what : * )
|
||||
{
|
||||
local result ;
|
||||
local args = 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
|
||||
|
||||
if ! $(.$(what)-tested.$(ps))
|
||||
{
|
||||
.$(what)-tested.$(ps) = true ;
|
||||
local targets.$(i) what.$(i) ;
|
||||
for local i in $(args)
|
||||
{
|
||||
if ! $($(i))
|
||||
{
|
||||
break ;
|
||||
}
|
||||
targets.$(i) = [ targets.generate-from-reference
|
||||
$($(i)[1]) : $(project) : $(ps) ] ;
|
||||
# ignore usage requirements
|
||||
targets.$(i) = $(targets.$(i)[2-]) ;
|
||||
if $($(i)[2])
|
||||
{
|
||||
what.$(i) = $($(i)[2]) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
local t = [ targets.resolve-reference
|
||||
$($(i)[1]) : $(project) ] ;
|
||||
what.$(i) = [ $(t[1]).name ] ;
|
||||
}
|
||||
}
|
||||
|
||||
result = [ try-find-build $(ps) : $(what)
|
||||
: $(what.4) $(targets.4)
|
||||
: $(what.5) $(targets.5)
|
||||
: $(what.6) $(targets.6)
|
||||
: $(what.7) $(targets.7)
|
||||
: $(what.8) $(targets.8)
|
||||
: $(what.9) $(targets.9)
|
||||
: $(what.10) $(targets.10)
|
||||
: $(what.11) $(targets.11)
|
||||
: $(what.12) $(targets.12)
|
||||
: $(what.13) $(targets.13)
|
||||
: $(what.14) $(targets.14)
|
||||
: $(what.15) $(targets.15)
|
||||
: $(what.16) $(targets.16)
|
||||
: $(what.17) $(targets.17)
|
||||
: $(what.18) $(targets.18)
|
||||
: $(what.19) $(targets.19) ] ;
|
||||
.$(what)-result.$(ps) = $(result) ;
|
||||
|
||||
return $(result) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
return $(.$(what)-result.$(ps)) ;
|
||||
}
|
||||
}
|
||||
|
||||
rule get-relevant-features ( )
|
||||
{
|
||||
local relevant = [ feature.expand <configure> ] ;
|
||||
local result = ;
|
||||
@@ -270,6 +428,19 @@ rule builds ( metatarget-reference : properties * : what ? : retry ? )
|
||||
$(retry) ] ;
|
||||
}
|
||||
|
||||
rule find-builds ( what : properties * : * )
|
||||
{
|
||||
local relevant = [ property.select [ get-relevant-features ] : $(properties) ] ;
|
||||
local ps = [ property-set.create $(relevant) ] ;
|
||||
local t = [ targets.current ] ;
|
||||
local p = [ $(t).project ] ;
|
||||
|
||||
return [ find-builds-raw $(p) : $(ps) : $(what) :
|
||||
$(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) :
|
||||
$(10) : $(11) : $(12) : $(13) : $(14) : $(15) :
|
||||
$(16) : $(17) : $(18) : $(19) ] ;
|
||||
}
|
||||
|
||||
|
||||
# Called by Boost.Build startup code to specify the file to receive the
|
||||
# configuration check results. Should never be called by user code.
|
||||
@@ -314,13 +485,89 @@ class check-target-builds-worker
|
||||
}
|
||||
}
|
||||
|
||||
class configure-choose-worker
|
||||
{
|
||||
import configure ;
|
||||
import property ;
|
||||
rule __init__ ( message : * )
|
||||
{
|
||||
self.message = $(message) ;
|
||||
for i in 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
|
||||
{
|
||||
local name = [ CALC $(i) - 1 ] ;
|
||||
self.targets.$(name) = $($(i)[1]) ;
|
||||
if ! $($(i)[2]:G) # Check whether the second argument is a property
|
||||
{
|
||||
self.what.$(name) = $($(i)[2]) ;
|
||||
self.props.$(name) = $($(i)[3-]) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
self.props.$(name) = $($(i)[2-]) ;
|
||||
}
|
||||
}
|
||||
}
|
||||
rule all-properties ( )
|
||||
{
|
||||
local i = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 ;
|
||||
return $(self.props.$(i)) ;
|
||||
}
|
||||
rule check ( properties * )
|
||||
{
|
||||
local i = [ configure.find-builds $(self.message) : $(properties)
|
||||
: $(self.targets.1) $(self.what.1)
|
||||
: $(self.targets.2) $(self.what.2)
|
||||
: $(self.targets.3) $(self.what.3)
|
||||
: $(self.targets.4) $(self.what.4)
|
||||
: $(self.targets.5) $(self.what.5)
|
||||
: $(self.targets.6) $(self.what.6)
|
||||
: $(self.targets.7) $(self.what.7)
|
||||
: $(self.targets.8) $(self.what.8)
|
||||
: $(self.targets.9) $(self.what.9)
|
||||
: $(self.targets.10) $(self.what.10)
|
||||
: $(self.targets.11) $(self.what.11)
|
||||
: $(self.targets.12) $(self.what.12)
|
||||
: $(self.targets.13) $(self.what.13)
|
||||
: $(self.targets.14) $(self.what.14)
|
||||
: $(self.targets.15) $(self.what.15)
|
||||
: $(self.targets.16) $(self.what.16)
|
||||
: $(self.targets.17) $(self.what.17)
|
||||
: $(self.targets.18) $(self.what.18)
|
||||
: $(self.targets.19) $(self.what.19) ] ;
|
||||
if $(self.props.$(i))
|
||||
{
|
||||
return [ property.evaluate-conditionals-in-context $(self.props.$(i)) : $(properties) ] ;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
rule check-target-builds ( target message ? : true-properties * :
false-properties * )
{
local instance = [ new check-target-builds-worker $(target) $(message) :
$(true-properties) : $(false-properties) ] ;
return <conditional>@$(instance).check ;
return <conditional>@$(instance).check
[ property.evaluate-conditional-relevance
$(true-properties) $(false-properties)
: [ configure.get-relevant-features ] ] ;
}

# Usage:
# [ configure.choose "architecture"
# : /config//x86 x86 <architecture>x86
# : /config//mips mips <architecture>mips
# ]
rule choose ( message : * )
{
local instance = [ new configure-choose-worker $(message)
: $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9)
: $(10) : $(11) : $(12) : $(13) : $(14) : $(15) : $(16)
: $(17) : $(18) : $(19) ] ;
return <conditional>@$(instance).check
[ property.evaluate-conditional-relevance
[ $(instance).all-properties ]
: [ configure.get-relevant-features ] ] ;
}
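For context, a hedged sketch of the usual Jamfile-level use of check-target-builds; the target reference, message and properties are illustrative only:

exe app : app.cpp
    : [ check-target-builds /zlib//z "zlib is usable"
        : <library>/zlib//z
        : <define>NO_ZLIB ]
    ;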
|
||||
|
||||
|
||||
|
||||
@@ -289,7 +289,8 @@ local rule find-implied-subfeature ( feature subvalue : value-string ? )
|
||||
import errors ;
|
||||
errors.error invalid feature $(feature) ;
|
||||
}
|
||||
return $($(feature)$(value-string:E="")<>$(subvalue).subfeature) ;
|
||||
value-string += "" ;
|
||||
return $($(feature)$(value-string)<>$(subvalue).subfeature) ;
|
||||
}
|
||||
|
||||
|
||||
@@ -371,12 +372,8 @@ local rule expand-subfeatures-aux (
|
||||
|
||||
local result = $(components[1]:G=$(feature)) ;
|
||||
|
||||
local subvalues = $(components[2-]) ;
|
||||
while $(subvalues)
|
||||
for local subvalue in $(components[2-])
|
||||
{
|
||||
local subvalue = $(subvalues[1]) ; # Pop the head off of subvalues.
|
||||
subvalues = $(subvalues[2-]) ;
|
||||
|
||||
local subfeature = [ find-implied-subfeature $(feature) $(subvalue) :
|
||||
$(value) ] ;
|
||||
|
||||
@@ -385,7 +382,7 @@ local rule expand-subfeatures-aux (
|
||||
{
|
||||
result = $(components:J=-) ;
|
||||
result = $(result:G=$(feature)) ;
|
||||
subvalues = ; # Stop looping.
|
||||
break ;
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -665,6 +662,10 @@ rule subfeature (
|
||||
local f = [ utility.ungrist $(feature) ] ;
|
||||
feature $(f)-$(subfeature-name) : $(subvalues) : $(attributes) subfeature ;
|
||||
|
||||
# Features and subfeatures are always relevant as a group
|
||||
.feature-dependencies.$(f) += $(f)-$(subfeature-name) ;
|
||||
.feature-dependencies.$(f)-$(subfeature-name) += $(f) ;
|
||||
|
||||
# Now make sure the subfeature values are known.
|
||||
extend-subfeature $(feature) $(value-string) : $(subfeature) : $(subvalues) ;
|
||||
}
|
||||
@@ -695,6 +696,10 @@ rule compose ( composite-property : component-properties * )
|
||||
errors.error composite property "$(composite-property)" cannot have itself as a component ;
|
||||
}
|
||||
$(composite-property).components = $(component-properties) ;
|
||||
|
||||
# A composite feature is relevant if any composed feature is relevant
|
||||
local component-features = [ sequence.transform utility.ungrist : $(component-properties:G) ] ;
|
||||
.feature-dependencies.$(component-features) += [ utility.ungrist $(feature) ] ;
|
||||
}
|
||||
|
||||
|
||||
@@ -1043,6 +1048,8 @@ local rule split-top-feature ( feature-plus )
|
||||
# Given a set of properties, add default values for features not represented in
|
||||
# the set.
|
||||
#
|
||||
# properties must be fully expanded and must not contain conditionals.
|
||||
#
|
||||
# Note: if there's an ordinary feature F1 and a composite feature F2 which
|
||||
# includes some value for F1 and both feature have default values then the
|
||||
# default value of F1 will be added (as opposed to the value in F2). This might
|
||||
@@ -1070,27 +1077,65 @@ rule add-defaults ( properties * )
|
||||
feature ;
|
||||
}
|
||||
}
|
||||
# We don't add default for elements with ":" inside. This catches:
|
||||
# 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
|
||||
# to be taken as the specified value for <variant>
|
||||
# 2. Free properties with ":" in values. We don't care, since free
|
||||
# properties don't have defaults.
|
||||
local xproperties = [ MATCH "^([^:]+)$" : $(properties) ] ;
|
||||
local missing-top = [ set.difference $(.all-top-features) : $(xproperties:G) ] ;
|
||||
local missing-top = [ set.difference $(.all-top-features) : $(properties:G) ] ;
|
||||
local more = [ defaults $(missing-top) ] ;
|
||||
properties += $(more) ;
|
||||
xproperties += $(more) ;
|
||||
|
||||
# Add defaults for subfeatures of features which are present.
|
||||
for local p in $(xproperties)
|
||||
# This is similar to property.refine, except that it
|
||||
# does not remove subfeatures, because we might be adding
|
||||
# the default value of a subfeature.
|
||||
local to-remove ;
|
||||
for local f in $(properties:G)
|
||||
{
|
||||
local s = $($(p:G).subfeatures) ;
|
||||
local f = [ utility.ungrist $(p:G) ] ;
|
||||
local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
|
||||
properties += [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
|
||||
if ! free in [ attributes $(f) ]
|
||||
{
|
||||
to-remove += $(f) ;
|
||||
}
|
||||
}
|
||||
|
||||
return $(properties) ;
|
||||
local worklist = $(properties) ;
|
||||
local expanded-from-composite ;
|
||||
local to-expand = $(more) ;
|
||||
while $(worklist)
|
||||
{
|
||||
# Add defaults for subfeatures of features which are present.
|
||||
for local p in $(worklist)
|
||||
{
|
||||
local s = $($(p:G).subfeatures) ;
|
||||
local f = [ utility.ungrist $(p:G) ] ;
|
||||
local missing-subs = [ set.difference <$(f)-$(s)> : $(properties:G) ] ;
|
||||
local sd = [ defaults [ select-subfeatures $(p) : $(missing-subs) ] ] ;
|
||||
to-expand += $(sd) ;
|
||||
}
|
||||
worklist = ;
|
||||
|
||||
# Expand subfeatures of newly added properties
|
||||
for local m in [ sequence.transform expand-composite : $(to-expand) ]
|
||||
{
|
||||
if ! $(m:G) in $(to-remove)
|
||||
{
|
||||
local att = [ attributes $(m:G) ] ;
|
||||
if $(m:G) in $(expanded-from-composite) &&
|
||||
! free in $(att) &&
|
||||
! $(m) in $(more)
|
||||
{
|
||||
import errors ;
|
||||
errors.error "default values for $(p:G) conflict" ;
|
||||
}
|
||||
if ! $(m) in $(to-expand)
|
||||
{
|
||||
expanded-from-composite += $(m:G) ;
|
||||
}
|
||||
more += $(m) ;
|
||||
if ! subfeature in $(att) && ! free in $(att)
|
||||
{
|
||||
worklist += $(m) ;
|
||||
}
|
||||
}
|
||||
}
|
||||
to-expand = ;
|
||||
}
|
||||
|
||||
return [ sequence.unique $(properties) $(more) ] ;
|
||||
}
|
||||
|
||||
|
||||
@@ -1124,6 +1169,41 @@ rule split ( property-set )
|
||||
return $(result) ;
|
||||
}
|
||||
|
||||
# Returns all the features that also must be relevant when these features are relevant
|
||||
rule expand-relevant ( features * )
|
||||
{
|
||||
local conditional ;
|
||||
local result ;
|
||||
for f in $(features)
|
||||
{
|
||||
# This looks like a conditional, even though it isn't really.
|
||||
# (Free features can never be used in conditionals)
|
||||
local split = [ MATCH "^(.*):<relevant>(.*)$" : $(f) ] ;
|
||||
if $(split)
|
||||
{
|
||||
local-dependencies.$(split[1]) += $(split[2]) ;
|
||||
conditional += local-dependencies.$(split[1]) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
result += $(f) ;
|
||||
}
|
||||
}
|
||||
local queue = $(result) ;
|
||||
while $(queue)
|
||||
{
|
||||
local added = [ set.difference
|
||||
$(.feature-dependencies.$(queue))
|
||||
$(local-dependencies.$(queue))
|
||||
: $(result) ] ;
|
||||
result += $(added) ;
|
||||
queue = $(added) ;
|
||||
}
|
||||
# Clean up local map
|
||||
$(conditional) = ;
|
||||
return $(result) ;
|
||||
}
|
||||
|
||||
|
||||
# Tests of module feature.
|
||||
#
|
||||
@@ -1202,6 +1282,9 @@ rule __test__ ( )
|
||||
assert.result <define>foo=x-y
|
||||
: expand-subfeatures <define>foo=x-y ;
|
||||
|
||||
assert.result <define>minus=-
|
||||
: expand-subfeatures <define>minus=- ;
|
||||
|
||||
assert.result <toolset>gcc <toolset-gcc:version>3.0.1
|
||||
: expand-subfeatures gcc-3.0.1 ;
|
||||
|
||||
@@ -1234,17 +1317,18 @@ rule __test__ ( )
|
||||
|
||||
assert.result optional : attributes <fu> ;
|
||||
|
||||
assert.result <runtime-link>static <define>foobar <optimization>on
|
||||
<toolset>gcc:<define>FOO <toolset>gcc <variant>debug <stdlib>native
|
||||
<dummy>dummy1 <toolset-gcc:version>2.95.2
|
||||
: add-defaults <runtime-link>static <define>foobar <optimization>on
|
||||
<toolset>gcc:<define>FOO ;
|
||||
assert.result [ SORT <define>_DEBUG <runtime-link>static
|
||||
<define>foobar <optimization>on
|
||||
<toolset>gcc <variant>debug <stdlib>native
|
||||
<dummy>dummy1 <toolset-gcc:version>2.95.2 ]
|
||||
: add-defaults <runtime-link>static <define>foobar <optimization>on ;
|
||||
|
||||
assert.result <runtime-link>static <define>foobar <optimization>on
|
||||
<toolset>gcc:<define>FOO <fu>fu1 <toolset>gcc <variant>debug
|
||||
<stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2
|
||||
assert.result [ SORT <define>_DEBUG <runtime-link>static
|
||||
<define>foobar <optimization>on
|
||||
<fu>fu1 <toolset>gcc <variant>debug
|
||||
<stdlib>native <dummy>dummy1 <fu-subfu2>q <toolset-gcc:version>2.95.2 ]
|
||||
: add-defaults <runtime-link>static <define>foobar <optimization>on
|
||||
<toolset>gcc:<define>FOO <fu>fu1 ;
|
||||
<fu>fu1 ;
|
||||
|
||||
set-default <runtime-link> : static ;
|
||||
assert.result <runtime-link>static : defaults <runtime-link> ;
|
||||
|
||||
@@ -167,8 +167,10 @@ class generator
|
||||
import utility ;
|
||||
import path ;
|
||||
import property ;
|
||||
import property-set ;
|
||||
import sequence ;
|
||||
import set ;
|
||||
import toolset ;
|
||||
import type ;
|
||||
import virtual-target ;
|
||||
|
||||
@@ -236,6 +238,10 @@ class generator
|
||||
# Note that 'transform' here, is the same as 'for_each'.
|
||||
sequence.transform type.validate : $(self.source-types) ;
|
||||
sequence.transform type.validate : $(self.target-types) ;
|
||||
|
||||
local relevant-for-generator =
|
||||
[ sequence.transform utility.ungrist : $(requirements:G) ] ;
|
||||
self.relevant-features = [ property-set.create <relevant>$(relevant-for-generator) ] ;
|
||||
}
|
||||
|
||||
################# End of constructor #################
|
||||
@@ -406,10 +412,10 @@ class generator
|
||||
}
|
||||
|
||||
local result ;
|
||||
if $(consumed)
|
||||
if $(consumed[2])
|
||||
{
|
||||
result = [ construct-result $(consumed) : $(project) $(name) :
|
||||
$(property-set) ] ;
|
||||
result = [ construct-result $(consumed[2-]) : $(project) $(name) :
|
||||
[ $(property-set).add $(consumed[1]) ] ] ;
|
||||
}
|
||||
|
||||
if $(result)
|
||||
@@ -421,7 +427,11 @@ class generator
|
||||
generators.dout [ indent ] " FAILURE" ;
|
||||
}
|
||||
generators.dout ;
|
||||
return $(result) ;
|
||||
if $(result)
|
||||
{
|
||||
# Make sure that we propagate usage-requirements up the stack.
|
||||
return [ $(result[1]).add $(consumed[1]) ] $(result[2-]) ;
|
||||
}
|
||||
}
|
||||
|
||||
# Constructs the dependency graph to be returned by this generator.
|
||||
@@ -443,6 +453,11 @@ class generator
|
||||
)
|
||||
{
|
||||
local result ;
|
||||
|
||||
local relevant = [ toolset.relevant $(self.rule-name) ] ;
|
||||
relevant = [ $(relevant).add $(self.relevant-features) ] ;
|
||||
property-set = [ $(property-set).add $(relevant) ] ;
|
||||
|
||||
# If this is a 1->1 transformation, apply it to all consumed targets in
|
||||
# order.
|
||||
if ! $(self.source-types[2]) && ! $(self.composing)
|
||||
@@ -458,7 +473,10 @@ class generator
|
||||
result += [ generated-targets $(consumed) : $(property-set) :
|
||||
$(project) $(name) ] ;
|
||||
}
|
||||
return $(result) ;
|
||||
if $(result)
|
||||
{
|
||||
return $(relevant) $(result) ;
|
||||
}
|
||||
}
|
||||
|
||||
# Determine target name from fullname (maybe including path components)
|
||||
@@ -576,6 +594,7 @@ class generator
|
||||
{
|
||||
local _consumed ;
|
||||
local missing-types ;
|
||||
local usage-requirements ;
|
||||
|
||||
if $(sources[2])
|
||||
{
|
||||
@@ -588,6 +607,7 @@ class generator
|
||||
local temp = [ consume-directly $(sources) ] ;
|
||||
if $(temp[1])
|
||||
{
|
||||
usage-requirements = [ property-set.empty ] ;
|
||||
_consumed = $(temp[1]) ;
|
||||
}
|
||||
missing-types = $(temp[2-]) ;
|
||||
@@ -613,7 +633,7 @@ class generator
|
||||
# everything to the required type. There is no need to rerun it on
|
||||
# targets of different types.
|
||||
|
||||
# NOTE: ignoring usage requirements.
|
||||
usage-requirements = $(transformed[1]) ;
|
||||
for local t in $(transformed[2-])
|
||||
{
|
||||
if [ $(t).type ] in $(missing-types)
|
||||
@@ -623,7 +643,7 @@ class generator
|
||||
}
|
||||
}
|
||||
|
||||
return [ sequence.unique $(_consumed) ] ;
|
||||
return $(usage-requirements) [ sequence.unique $(_consumed) ] ;
|
||||
}
|
||||
|
||||
# Converts several files to consumable types. Called for composing
|
||||
@@ -638,10 +658,11 @@ class generator
|
||||
if ! $(self.source-types)
|
||||
{
|
||||
# Anything is acceptable
|
||||
return $(sources) ;
|
||||
return [ property-set.empty ] $(sources) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
local usage-requirements = [ property-set.empty ] ;
|
||||
local acceptible-types = [ sequence.unique
|
||||
[ sequence.transform type.all-derived : $(self.source-types) ] ] ;
|
||||
for local source in $(sources)
|
||||
@@ -661,13 +682,17 @@ class generator
|
||||
{
|
||||
generators.dout [ indent ] " failed to convert " $(source) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
usage-requirements = [ $(usage-requirements).add $(transformed[1]) ] ;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
result += $(source) ;
|
||||
}
|
||||
}
|
||||
return [ sequence.unique $(result) : stable ] ;
|
||||
return $(usage-requirements) [ sequence.unique $(result) : stable ] ;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -441,7 +441,7 @@ rule initialize (
|
||||
local jamroot ;
|
||||
|
||||
local parent-module ;
|
||||
if $(module-name) = test-config
|
||||
if $(module-name) in test-config all-config
|
||||
{
|
||||
# No parent.
|
||||
}
|
||||
@@ -502,7 +502,7 @@ rule initialize (
|
||||
}
|
||||
else
|
||||
{
|
||||
local cfgs = project site test user ;
|
||||
local cfgs = project site test user all ;
|
||||
if ! $(module-name) in $(cfgs)-config
|
||||
{
|
||||
# This is a standalone project with known location. Set its
|
||||
@@ -833,7 +833,7 @@ rule current ( )
|
||||
# Temporarily changes the current project to 'project'. Should be followed by
|
||||
# 'pop-current'.
|
||||
#
|
||||
rule push-current ( project )
|
||||
rule push-current ( project ? )
|
||||
{
|
||||
.saved-current-project += $(.current-project) ;
|
||||
.current-project = $(project) ;
|
||||
@@ -900,9 +900,9 @@ rule get-jamroot-module ( project )
|
||||
|
||||
# Returns the project target corresponding to the 'project-module'.
|
||||
#
|
||||
rule target ( project-module )
|
||||
rule target ( project-module : allow-missing ? )
|
||||
{
|
||||
if ! $(.target.$(project-module))
|
||||
if ! $(.target.$(project-module)) && ! $(allow-missing)
|
||||
{
|
||||
import errors ;
|
||||
errors.user-error Project target requested but not yet assigned for
|
||||
|
||||
@@ -34,6 +34,7 @@ class property-set
|
||||
{
|
||||
import errors ;
|
||||
import feature ;
|
||||
import modules ;
|
||||
import path ;
|
||||
import property ;
|
||||
import property-set ;
|
||||
@@ -86,6 +87,26 @@ class property-set
|
||||
return $(self.free) ;
|
||||
}
|
||||
|
||||
# Returns relevant base properties
|
||||
rule base-relevant ( )
|
||||
{
|
||||
if ! $(self.relevant-initialized)
|
||||
{
|
||||
init-relevant ;
|
||||
}
|
||||
return $(self.base-relevant) ;
|
||||
}
|
||||
|
||||
# Returns all relevant properties
|
||||
rule relevant
|
||||
{
|
||||
if ! $(self.relevant-initialized)
|
||||
{
|
||||
init-relevant ;
|
||||
}
|
||||
return $(self.relevant) ;
|
||||
}
|
||||
|
||||
# Returns dependency properties.
|
||||
#
|
||||
rule dependency ( )
|
||||
@@ -215,7 +236,7 @@ class property-set
|
||||
{
|
||||
if ! $(self.as-path)
|
||||
{
|
||||
self.as-path = [ property.as-path [ base ] ] ;
|
||||
self.as-path = [ property.as-path [ base-relevant ] ] ;
|
||||
}
|
||||
return $(self.as-path) ;
|
||||
}
|
||||
@@ -345,6 +366,30 @@ class property-set
|
||||
self.base-initialized = true ;
|
||||
}
|
||||
|
||||
rule init-relevant ( )
|
||||
{
|
||||
local relevant-features = [ get <relevant> ] ;
|
||||
relevant-features = [ feature.expand-relevant $(relevant-features) ] ;
|
||||
relevant-features = <$(relevant-features)> ;
|
||||
ignore-relevance = [ modules.peek property-set : .ignore-relevance ] ;
|
||||
for local p in $(self.raw)
|
||||
{
|
||||
if $(ignore-relevance) || $(p:G) in $(relevant-features)
|
||||
{
|
||||
local att = [ feature.attributes $(p:G) ] ;
|
||||
if ! ( incidental in $(att) )
|
||||
{
|
||||
self.relevant += $(p) ;
|
||||
if ! ( free in $(att) )
|
||||
{
|
||||
self.base-relevant += $(p) ;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
self.relevant-initialized = true ;
|
||||
}
|
||||
|
||||
rule init-dependency ( )
|
||||
{
|
||||
for local p in $(self.raw)
|
||||
@@ -369,7 +414,7 @@ class property-set
|
||||
# characters as well, e.g. free or indirect properties. Indirect
|
||||
# properties for example contain a full Jamfile path in their value
|
||||
# which on Windows file systems contains ':' as the drive separator.
|
||||
if [ MATCH (:) : $(p:G=) ]
|
||||
if ( [ MATCH (:) : $(p:G=) ] && ! ( free in [ feature.attributes $(p:G) ] ) ) || $(p:G) = <conditional>
|
||||
{
|
||||
self.conditional += $(p) ;
|
||||
}
|
||||
@@ -382,6 +427,13 @@ class property-set
|
||||
}
|
||||
}
|
||||
|
||||
# This is a temporary measure to help users work around
|
||||
# any problems. Remove it once we've verified that
|
||||
# everything works.
|
||||
if --ignore-relevance in [ modules.peek : ARGV ]
|
||||
{
|
||||
.ignore-relevance = true ;
|
||||
}
|
||||
|
||||
# Creates a new 'property-set' instance for the given raw properties or returns
|
||||
# an already existing ones.
|
||||
|
||||
@@ -28,8 +28,20 @@ rule refine ( properties * : requirements * )
|
||||
for local r in $(requirements)
|
||||
{
|
||||
# Do not consider conditional requirements.
|
||||
if ! [ MATCH (:) : $(r:G=) ] && ! free in [ feature.attributes $(r:G) ]
|
||||
if ! [ MATCH (:<) : $(r:G=) ] && ! free in [ feature.attributes $(r:G) ]
|
||||
{
|
||||
if ! $(r) in $(properties)
|
||||
{
|
||||
# Kill subfeatures of properties that we're changing
|
||||
local sub = [ modules.peek feature : $(r:G).subfeatures ] ;
|
||||
if $(sub)
|
||||
{
|
||||
# non-specific subfeatures are still valid
|
||||
sub = [ MATCH "(.*:.*)" : $(sub) ] ;
|
||||
local name = [ utility.ungrist $(r:G) ] ;
|
||||
unset += <$(name)-$(sub)> ;
|
||||
}
|
||||
}
|
||||
unset += $(r:G) ;
|
||||
}
|
||||
}
|
||||
@@ -37,7 +49,7 @@ rule refine ( properties * : requirements * )
|
||||
# Remove properties that are overridden by requirements
|
||||
for local p in $(properties)
|
||||
{
|
||||
if [ MATCH (:) : $(p:G=) ] || ! $(p:G) in $(unset)
|
||||
if [ MATCH (:<) : $(p:G=) ] || ! $(p:G) in $(unset)
|
||||
{
|
||||
result += $(p) ;
|
||||
}
|
||||
@@ -55,12 +67,17 @@ rule evaluate-conditionals-in-context ( properties * : context * )
|
||||
{
|
||||
local base ;
|
||||
local conditionals ;
|
||||
local indirect ;
|
||||
for local p in $(properties)
|
||||
{
|
||||
if [ MATCH (:<) : $(p) ]
|
||||
if [ MATCH (:<) : $(p) ] && ! free in [ feature.attributes $(p:G) ]
|
||||
{
|
||||
conditionals += $(p) ;
|
||||
}
|
||||
else if $(p:G) = <conditional>
|
||||
{
|
||||
indirect += $(p) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
base += $(p) ;
|
||||
@@ -116,10 +133,58 @@ rule evaluate-conditionals-in-context ( properties * : context * )
|
||||
}
|
||||
}
|
||||
}
|
||||
# Import here to avoid cyclic dependency
|
||||
import project ;
|
||||
for local i in [ MATCH "^@(.*)" : $(indirect:G=) ]
|
||||
{
|
||||
# If the rule was set in a project module, translate paths
|
||||
# relative to that project's location.
|
||||
local m = [ indirect.get-module $(i) ] ;
|
||||
local p = [ project.target $(m) : allow-missing ] ;
|
||||
local new = [ indirect.call $(i) $(context) ] ;
|
||||
if $(p) && [ $(p).location ]
|
||||
{
|
||||
result += [ translate-paths $(new) : [ $(p).location ] ] ;
|
||||
}
|
||||
else
|
||||
{
|
||||
result += $(new) ;
|
||||
}
|
||||
}
|
||||
return $(result) ;
|
||||
}
|
||||
|
||||
|
||||
# Returns <relevant> properties indicating how the conditionals in
# properties affect feature relevance. If the optional argument cond
# is passed, it is treated as extra conditions for all properties.
#
rule evaluate-conditional-relevance ( properties * : cond * )
{
cond = [ sequence.transform utility.ungrist : $(cond:G) ] ;
local result ;
for local p in $(properties)
{
# Separate condition and property.
local s = [ MATCH ^(.*):(<.*) : $(p) ] ;
if ! $(s) || free in [ feature.attributes $(p:G) ]
{
local value = [ utility.ungrist $(p:G) ] ;
result += <relevant>$(value):<relevant>$(cond) ;
}
else
{
local condition = [ regex.split $(s[1]) "," ] ;
condition = [ MATCH ^!?(.*) : $(condition) ] ;
condition = [ sequence.transform utility.ungrist : $(condition:G) ] $(cond) ;
local value = [ utility.ungrist $(s[2]:G) ] ;
result += <relevant>$(value):<relevant>$(condition) ;
}
}
return [ sequence.unique $(result) ] ;
}
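To make the mapping concrete, a worked example that follows the code above: given the single conditional property <toolset>gcc:<define>FOO and no extra cond argument, the condition part is <toolset>gcc and the property part is <define>FOO, so the rule contributes <relevant>define:<relevant>toolset to its result.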
|
||||
|
||||
|
||||
rule expand-subfeatures-in-conditions ( properties * )
|
||||
{
|
||||
local result ;
|
||||
|
||||
@@ -191,6 +191,7 @@ class project-target : abstract-target
|
||||
import property-set ;
|
||||
import set ;
|
||||
import sequence ;
|
||||
import toolset ;
|
||||
import "class" : new ;
|
||||
|
||||
rule __init__ ( name : project-module parent-project ?
|
||||
@@ -584,6 +585,8 @@ class project-target : abstract-target
|
||||
IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules)
|
||||
;
|
||||
EXPORT $(this-module) : $(user-rules) ;
|
||||
|
||||
toolset.inherit-flags $(this-module) : $(parent-module) ;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -621,10 +624,11 @@ class main-target : abstract-target
|
||||
{
|
||||
import assert ;
|
||||
import feature ;
|
||||
import print ;
|
||||
import property-set ;
|
||||
import sequence ;
|
||||
import set ;
|
||||
import targets : start-building end-building ;
|
||||
import utility ;
|
||||
|
||||
rule __init__ ( name : project )
|
||||
{
|
||||
@@ -719,6 +723,43 @@ class main-target : abstract-target
|
||||
}
|
||||
}
|
||||
|
||||
# Features are relevant here if they could affect alternative
# selection. That is, base, non-conditional properties that
# are not identical in all target alternatives.
rule relevant-features ( )
{
if $(self.alternatives[2-])
{
if $(self.relevant-features)
{
return $(self.relevant-features) ;
}
local all-properties ;
for t in $(self.alternatives)
{
local ps = [ $(t).requirements ] ;
ps = [ property-set.create [ $(ps).non-conditional ] ] ;
all-properties += [ $(ps).base ] ;
}
all-properties = [ sequence.unique $(all-properties) ] ;
local result ;
for t in $(self.alternatives)
{
local ps = [ $(t).requirements ] ;
ps = [ property-set.create [ $(ps).non-conditional ] ] ;
local properties = [ set.difference $(all-properties) : [ $(ps).base ] ] ;
result += $(properties:G) ;
}
result = [ sequence.transform utility.ungrist : [ sequence.unique $(result) ] ] ;
self.relevant-features = [ property-set.create <relevant>$(result) ] ;
return $(self.relevant-features) ;
}
else
{
return [ property-set.empty ] ;
}
}
|
||||
|
||||
rule apply-default-build ( property-set )
|
||||
{
|
||||
return [ targets.apply-default-build $(property-set) :
|
||||
@@ -734,16 +775,13 @@ class main-target : abstract-target
|
||||
{
|
||||
start-building $(__name__) ;
|
||||
|
||||
# We want composite properties in the build request to act as if all the
|
||||
# properties they expand to have been explicitly specified.
|
||||
property-set = [ $(property-set).expand ] ;
|
||||
|
||||
local all-property-sets = [ apply-default-build $(property-set) ] ;
|
||||
local relevant = [ relevant-features ] ;
|
||||
local usage-requirements = [ property-set.empty ] ;
|
||||
local result ;
|
||||
for local p in $(all-property-sets)
|
||||
{
|
||||
local r = [ generate-really $(p) ] ;
|
||||
local r = [ generate-really [ $(p).add $(relevant) ] ] ;
|
||||
if $(r)
|
||||
{
|
||||
usage-requirements = [ $(usage-requirements).add $(r[1]) ] ;
|
||||
@@ -901,8 +939,10 @@ rule apply-default-build ( property-set : default-build )
|
||||
# 1. First, see what properties from default-build are already present in
|
||||
# property-set.
|
||||
|
||||
local expanded = [ $(property-set).expand ] ;
|
||||
local raw = [ $(property-set).raw ] ;
|
||||
local specified-features = $(raw:G) ;
|
||||
local specified-features = [ $(expanded).raw ] ;
|
||||
specified-features = $(specified-features:G) ;
|
||||
|
||||
local defaults-to-apply ;
|
||||
for local d in [ $(default-build).raw ]
|
||||
@@ -936,7 +976,7 @@ rule apply-default-build ( property-set : default-build )
|
||||
for local p in $(properties)
|
||||
{
|
||||
result += [ property-set.create
|
||||
[ feature.expand [ feature.split $(p) ] ] ] ;
|
||||
[ feature.split $(p) ] ] ;
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -993,7 +1033,7 @@ rule common-properties ( build-request requirements )
|
||||
# If 'what' is 'refined' returns context refined with new requirements. If
|
||||
# 'what' is 'added' returns just the requirements to be applied.
|
||||
#
|
||||
rule evaluate-requirements ( requirements : context : what )
|
||||
rule evaluate-requirements ( requirements : context )
|
||||
{
|
||||
# Apply non-conditional requirements. It is possible that further
|
||||
# conditional requirement change a value set by non-conditional
|
||||
@@ -1019,42 +1059,29 @@ rule evaluate-requirements ( requirements : context : what )
|
||||
|
||||
local conditionals = [ $(requirements).conditional ] ;
|
||||
# The 'count' variable has one element for each conditional feature and for
|
||||
# each occurrence of '<indirect-conditional>' feature. It is used as a loop
|
||||
# each occurrence of '<conditional>' feature. It is used as a loop
|
||||
# counter: for each iteration of the loop before we remove one element and
|
||||
# the property set should stabilize before we are done. It is assumed that
|
||||
# #conditionals iterations should be enough for properties to propagate
|
||||
# along conditions in any direction.
|
||||
local count = $(conditionals) [ $(requirements).get <conditional> ]
|
||||
local count = $(conditionals)
|
||||
and-once-more ;
|
||||
|
||||
local added-requirements ;
|
||||
|
||||
local current = $(raw) ;
|
||||
|
||||
# It is assumed that ordinary conditional requirements can not add
|
||||
# <conditional> properties (a.k.a. indirect conditional properties), and
|
||||
# that rules referred to by <conditional> properties can not add new
|
||||
# <conditional> properties. So the list of indirect conditionals does not
|
||||
# change.
|
||||
local indirect = [ $(requirements).get <conditional> ] ;
|
||||
indirect = [ MATCH ^@(.*) : $(indirect) ] ;
|
||||
|
||||
local ok ;
|
||||
while $(count)
|
||||
{
|
||||
# We need to expand composites here so that the requirements can
|
||||
# safely override composite features.
|
||||
current = [ feature.expand-composites $(current) ] ;
|
||||
current = [ feature.add-defaults $(current) ] ;
|
||||
# Evaluate conditionals in context of current properties.
|
||||
local e = [ property.evaluate-conditionals-in-context $(conditionals) :
|
||||
$(current) ] ;
|
||||
|
||||
# Evaluate indirect conditionals.
|
||||
for local i in $(indirect)
|
||||
{
|
||||
local t = [ current ] ;
|
||||
local p = [ $(t).project ] ;
|
||||
local new = [ indirect.call $(i) $(current) ] ;
|
||||
e += [ property.translate-paths $(new) : [ $(p).location ] ] ;
|
||||
}
|
||||
|
||||
if $(e) = $(added-requirements)
|
||||
{
|
||||
# If we got the same result, we have found the final properties.
|
||||
@@ -1077,39 +1104,13 @@ rule evaluate-requirements ( requirements : context : what )
|
||||
errors.error Can not evaluate conditional properties $(conditionals) ;
|
||||
}
|
||||
|
||||
if $(what) = added
|
||||
{
|
||||
return [ property-set.create $(unconditional) $(added-requirements) ] ;
|
||||
}
|
||||
else if $(what) = refined
|
||||
{
|
||||
return [ property-set.create $(current) ] ;
|
||||
}
|
||||
else
|
||||
{
|
||||
import errors ;
|
||||
errors.error "Invalid value of the 'what' parameter." ;
|
||||
}
|
||||
return [ property-set.create $(current) ] ;
|
||||
}
|
||||
|
||||
|
||||
rule common-properties2 ( build-request requirements )
|
||||
{
|
||||
# This guarantees that default properties are present in the result, unless
|
||||
# they are overriden by some requirement. FIXME: There is a possibility that
|
||||
# we have added <foo>bar, which is composite and expands to <foo2>bar2, but
|
||||
# default value of <foo2> is not bar2, in which case it is not clear what to
|
||||
# do.
|
||||
#
|
||||
build-request = [ $(build-request).add-defaults ] ;
|
||||
# Features added by 'add-defaults' can be composite and expand to features
|
||||
# without default values -- which therefore have not been added yet. It
|
||||
# could be clearer/faster to expand only newly added properties but that is
|
||||
# not critical.
|
||||
build-request = [ $(build-request).expand ] ;
|
||||
|
||||
return [ evaluate-requirements $(requirements) : $(build-request) :
|
||||
refined ] ;
|
||||
return [ evaluate-requirements $(requirements) : $(build-request) ] ;
|
||||
}
|
||||
|
||||
|
||||
@@ -1214,9 +1215,9 @@ class basic-target : abstract-target
|
||||
#
|
||||
rule match ( property-set debug ? )
|
||||
{
|
||||
# The condition is composed of all base non-conditional properties. It
|
||||
# is not clear if we should expand 'self.requirements' or not. For one
|
||||
# thing, it would be nice to be able to put
|
||||
# The condition is composed of all base non-conditional properties. We
|
||||
# only expand subfeatures in the condition. We do not expand
|
||||
# composites. We want to be able to put
|
||||
# <toolset>msvc-6.0
|
||||
# in requirements. On the other hand, if we have <variant>release as a
|
||||
# condition it does not make sense to require <optimization>full to be
|
||||
@@ -1224,6 +1225,7 @@ class basic-target : abstract-target
|
||||
local bcondition = [ $(self.requirements).base ] ;
|
||||
local ccondition = [ $(self.requirements).conditional ] ;
|
||||
local condition = [ set.difference $(bcondition) : $(ccondition) ] ;
|
||||
condition = [ feature.expand-subfeatures $(condition) : unchecked ] ;
|
||||
if $(debug)
|
||||
{
|
||||
ECHO " next alternative: required properties:"
|
||||
@@ -1294,7 +1296,7 @@ class basic-target : abstract-target
|
||||
# define=FOO
|
||||
# he most likely wants this define to be set for all compiles.
|
||||
# Make it before check for already built.
|
||||
property-set = [ $(property-set).refine
|
||||
property-set = [ $(property-set).add
|
||||
[ build-system.command-line-free-features ] ] ;
|
||||
|
||||
if ! $(self.generated.$(property-set))
|
||||
@@ -1357,6 +1359,15 @@ class basic-target : abstract-target
|
||||
local gur = $(result[1]) ;
|
||||
result = $(result[2-]) ;
|
||||
|
||||
# Relevant is automatically applied to usage requirements
|
||||
# and only applies for propagated features
|
||||
local relevant = [ propagated-relevant
|
||||
[ $(gur).get <relevant> ]
|
||||
[ $(rproperties).get <relevant> ] ] ;
|
||||
gur = [ property-set.create
|
||||
[ property.change [ $(gur).raw ] : <relevant> ]
|
||||
<relevant>$(relevant) ] ;
|
||||
|
||||
if $(self.always)
|
||||
{
|
||||
for local t in $(result)
|
||||
@@ -1445,8 +1456,25 @@ class basic-target : abstract-target
|
||||
rule compute-usage-requirements ( subvariant )
|
||||
{
|
||||
local rproperties = [ $(subvariant).build-properties ] ;
|
||||
xusage-requirements = [ targets.evaluate-requirements
|
||||
$(self.usage-requirements) : $(rproperties) : added ] ;
|
||||
xusage-requirements =
|
||||
[ $(self.usage-requirements).evaluate-conditionals
|
||||
$(rproperties) ] ;
|
||||
|
||||
# Filter out non-propagated <relevant> properties
|
||||
local relevant ;
|
||||
for local r in [ $(xusage-requirements).get <relevant> ]
|
||||
{
|
||||
local check = [ MATCH "(.*):<relevant>(.*)" : $(r) ] ;
|
||||
if $(check) { check = $(check[2]) ; }
|
||||
else { check = $(r) ; }
|
||||
if propagated in [ feature.attributes <$(check)> ]
|
||||
{
|
||||
relevant += $(r) ;
|
||||
}
|
||||
}
|
||||
xusage-requirements = [ property-set.create
|
||||
[ property.change [ $(xusage-requirements).raw ] : <relevant> ]
|
||||
<relevant>$(relevant) ] ;
|
||||
|
||||
# We generate all dependency properties and add them, as well as their
|
||||
# usage requirements, to the result.
|
||||
@@ -1487,6 +1515,19 @@ class basic-target : abstract-target
|
||||
return [ $(result).add [ property-set.create $(raw) ] ] ;
|
||||
}
|
||||
|
||||
local rule propagated-relevant ( values * )
|
||||
{
|
||||
local result ;
|
||||
for local v in [ feature.expand-relevant $(values) ]
|
||||
{
|
||||
if propagated in [ feature.attributes <$(v)> ]
|
||||
{
|
||||
result += $(v) ;
|
||||
}
|
||||
}
|
||||
return $(result) ;
|
||||
}
|
||||
|
||||
# Creates new subvariant instances for 'targets'.
|
||||
# 'root-targets' - virtual targets to be returned to dependants
|
||||
# 'all-targets' - virtual targets created while building this main target
|
||||
@@ -1633,7 +1674,8 @@ rule main-target-requirements (
|
||||
import errors ;
|
||||
errors.error "Conflicting requirements for target:" $(requirements) ;
|
||||
}
|
||||
return [ $(requirements).add [ toolset.requirements ] ] ;
|
||||
local result = [ $(requirements).add [ toolset.requirements ] ] ;
|
||||
return [ $(result).add-raw [ property.evaluate-conditional-relevance [ $(result).raw ] ] ] ;
|
||||
}
|
||||
|
||||
|
||||
@@ -1656,7 +1698,10 @@ rule main-target-usage-requirements (
|
||||
$(specification)
|
||||
: [ $(project).project-module ] [ $(project).get location ] ] ;
|
||||
|
||||
return [ $(project-usage-requirements).add $(usage-requirements) ] ;
|
||||
local result = [ $(project-usage-requirements).add $(usage-requirements) ] ;
|
||||
local relevant =
|
||||
[ property.evaluate-conditional-relevance [ $(result).raw ] ] ;
|
||||
return [ $(result).add-raw $(relevant) ] ;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ import set ;
|
||||
import property-set ;
|
||||
import order ;
|
||||
import "class" : new ;
|
||||
import utility ;
|
||||
|
||||
|
||||
.flag-no = 1 ;
|
||||
@@ -76,6 +77,36 @@ rule pop-checking-for-flags-module ( )
|
||||
}
|
||||
|
||||
|
||||
# Specifies features that are referenced by the action rule.
|
||||
# This is necessary in order to detect that these features
|
||||
# are relevant.
|
||||
#
|
||||
rule uses-features ( rule-or-module : features * : unchecked ? )
|
||||
{
|
||||
local caller = [ CALLER_MODULE ] ;
|
||||
if ! [ MATCH ".*([.]).*" : $(rule-or-module) ]
|
||||
&& [ MATCH "(Jamfile<.*)" : $(caller) ]
|
||||
{
|
||||
# Unqualified rule name, used inside Jamfile. Most likely used with
|
||||
# 'make' or 'notfile' rules. This prevents setting flags on the entire
|
||||
# Jamfile module (this will be considered as rule), but who cares?
|
||||
# Probably, 'flags' rule should be split into 'flags' and
|
||||
# 'flags-on-module'.
|
||||
rule-or-module = $(caller).$(rule-or-module) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
local module_ = [ MATCH "([^.]*).*" : $(rule-or-module) ] ;
|
||||
if $(unchecked) != unchecked
|
||||
&& $(.flags-module-checking[1]) != unchecked
|
||||
&& $(module_) != $(caller)
|
||||
{
|
||||
errors.error "Module $(caller) attempted to set flags for module $(module_)" ;
|
||||
}
|
||||
}
|
||||
.uses-features.$(rule-or-module) += $(features) ;
|
||||
}
|
||||
|
||||
# Specifies the flags (variables) that must be set on targets under certain
|
||||
# conditions, described by arguments.
|
||||
#
|
||||
@@ -399,6 +430,34 @@ rule relevant-features ( rule-or-module )
|
||||
}
|
||||
}
|
||||
|
||||
# Returns a list of all the features which were
|
||||
# passed to uses-features.
|
||||
local rule used-features ( rule-or-module )
|
||||
{
|
||||
if ! $(.used-features.$(rule-or-module))
|
||||
{
|
||||
local result = $(.uses-features.$(rule-or-module)) ;
|
||||
|
||||
# Strip away last dot separated part and recurse.
|
||||
local next = [ MATCH ^(.+)\\.([^\\.])* : $(rule-or-module) ] ;
|
||||
if $(next)
|
||||
{
|
||||
result += [ used-features $(next[1]) ] ;
|
||||
}
|
||||
result = [ sequence.unique $(result) ] ;
|
||||
if $(result[1]) = ""
|
||||
{
|
||||
result = $(result) ;
|
||||
}
|
||||
.used-features.$(rule-or-module) = $(result) ;
|
||||
return $(result) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
return $(.used-features.$(rule-or-module)) ;
|
||||
}
|
||||
}
|
||||
|
||||
rule filter-property-set ( rule-or-module : property-set )
|
||||
{
|
||||
local key = .filtered.property-set.$(rule-or-module).$(property-set) ;
|
||||
@@ -454,6 +513,23 @@ rule set-target-variables ( rule-or-module targets + : property-set )
|
||||
}
|
||||
|
||||
|
||||
# Returns a property-set indicating which features are relevant
|
||||
# for the given rule.
|
||||
#
|
||||
rule relevant ( rule-name )
|
||||
{
|
||||
if ! $(.relevant-features-ps.$(rule-name))
|
||||
{
|
||||
local features = [ sequence.transform utility.ungrist :
|
||||
[ relevant-features $(rule-name) ]
|
||||
[ used-features $(rule-name) ] ] ;
|
||||
.relevant-features-ps.$(rule-name) =
|
||||
[ property-set.create <relevant>$(features) ] ;
|
||||
}
|
||||
return $(.relevant-features-ps.$(rule-name)) ;
|
||||
}
|
||||
|
||||
|
||||
# Make toolset 'toolset', defined in a module of the same name, inherit from
|
||||
# 'base'.
|
||||
# 1. The 'init' rule from 'base' is imported into 'toolset' with full name.
|
||||
|
||||
@@ -999,10 +999,8 @@ rule register ( target )
|
||||
{
|
||||
local ps1 = [ $(a1).properties ] ;
|
||||
local ps2 = [ $(a2).properties ] ;
|
||||
local p1 = [ $(ps1).base ] [ $(ps1).free ] [ set.difference
|
||||
[ $(ps1).dependency ] : [ $(ps1).incidental ] ] ;
|
||||
local p2 = [ $(ps2).base ] [ $(ps2).free ] [ set.difference
|
||||
[ $(ps2).dependency ] : [ $(ps2).incidental ] ] ;
|
||||
local p1 = [ $(ps1).relevant ] ;
|
||||
local p2 = [ $(ps2).relevant ] ;
|
||||
if $(p1) = $(p2)
|
||||
{
|
||||
result = $(t) ;
|
||||
|
||||
@@ -99,7 +99,7 @@ LIST * evaluate_rule( RULE * rule, OBJECT * rulename, FRAME * frame )
|
||||
|
||||
/* Check traditional targets $(<) and sources $(>). */
|
||||
if ( !rule->actions && !rule->procedure )
|
||||
unknown_rule( frame, NULL, frame->module, rule->name );
|
||||
unknown_rule( frame, NULL, frame->module, rulename );
|
||||
|
||||
/* If this rule will be executed for updating the targets then construct the
|
||||
* action for make().
|
||||
|
||||
@@ -172,7 +172,10 @@ if "_%BOOST_JAM_TOOLSET_ROOT%_" == "__" (
|
||||
if "_%BOOST_JAM_ARCH%_" == "__" set BOOST_JAM_ARCH=x86
|
||||
set BOOST_JAM_ARGS=%BOOST_JAM_ARGS% %BOOST_JAM_ARCH%
|
||||
|
||||
REM return to current directory as vsdevcmd_end.bat switches to %USERPROFILE%\Source if it exists.
|
||||
pushd %CD%
|
||||
if "_%VSINSTALLDIR%_" == "__" call :Call_If_Exists "%BOOST_JAM_TOOLSET_ROOT%Auxiliary\Build\vcvarsall.bat" %BOOST_JAM_ARGS%
|
||||
popd
|
||||
set "BOOST_JAM_CC=cl /nologo /RTC1 /Zi /MTd /Fobootstrap/ /Fdbootstrap/ -DNT -DYYDEBUG -wd4996 kernel32.lib advapi32.lib user32.lib"
|
||||
set "BOOST_JAM_OPT_JAM=/Febootstrap\jam0"
|
||||
set "BOOST_JAM_OPT_MKJAMBASE=/Febootstrap\mkjambase0"
|
||||
|
||||
@@ -195,8 +195,11 @@ static void debug_lol_read( FILE * in, LOL * lol )
|
||||
|
||||
static void debug_frame_write( FILE * out, FRAME * frame )
|
||||
{
|
||||
OBJECT * fullname = make_absolute_path( frame->file );
|
||||
debug_object_write( out, frame->file );
|
||||
OBJECT * fullname = constant_builtin;
|
||||
OBJECT * file = frame->file;
|
||||
if ( file == NULL ) file = constant_builtin;
|
||||
else fullname = make_absolute_path( frame->file );
|
||||
debug_object_write( out, file );
|
||||
debug_int_write( out, frame->line );
|
||||
debug_object_write( out, fullname );
|
||||
debug_lol_write( out, frame->args );
|
||||
@@ -469,7 +472,8 @@ void debug_on_instruction( FRAME * frame, OBJECT * file, int line )
|
||||
{
|
||||
int breakpoint_id;
|
||||
assert( debug_is_debugging() );
|
||||
if ( debug_state == DEBUG_NEXT && debug_depth <= 0 && debug_line != line )
|
||||
if ( debug_state == DEBUG_NEXT &&
|
||||
( debug_depth < 0 || ( debug_depth == 0 && debug_line != line ) ) )
|
||||
{
|
||||
debug_file = file;
|
||||
debug_line = line;
|
||||
@@ -483,14 +487,15 @@ void debug_on_instruction( FRAME * frame, OBJECT * file, int line )
|
||||
debug_frame = frame;
|
||||
debug_end_stepping();
|
||||
}
|
||||
else if ( debug_state == DEBUG_FINISH && debug_depth <= 0 )
|
||||
else if ( debug_state == DEBUG_FINISH && debug_depth < 0 )
|
||||
{
|
||||
debug_file = file;
|
||||
debug_line = line;
|
||||
debug_frame = frame;
|
||||
debug_end_stepping();
|
||||
}
|
||||
else if ( ( debug_file == NULL || ! object_equal( file, debug_file ) || line != debug_line ) &&
|
||||
else if ( ( debug_file == NULL || ! object_equal( file, debug_file ) ||
|
||||
line != debug_line || debug_depth != 0 ) &&
|
||||
( breakpoint_id = handle_line_breakpoint( file, line ) ) )
|
||||
{
|
||||
debug_file = file;
|
||||
@@ -498,12 +503,19 @@ void debug_on_instruction( FRAME * frame, OBJECT * file, int line )
|
||||
debug_frame = frame;
|
||||
debug_on_breakpoint( breakpoint_id );
|
||||
}
|
||||
else if ( ( debug_state == DEBUG_RUN || debug_state == DEBUG_FINISH ) &&
|
||||
( debug_depth < 0 || ( debug_depth == 0 && debug_line != line ) ) )
|
||||
{
|
||||
debug_file = NULL;
|
||||
debug_line = 0;
|
||||
}
|
||||
}
|
||||
|
||||
void debug_on_enter_function( FRAME * frame, OBJECT * name, OBJECT * file, int line )
|
||||
{
|
||||
int breakpoint_id;
|
||||
assert( debug_is_debugging() );
|
||||
++debug_depth;
|
||||
if ( debug_state == DEBUG_STEP && file )
|
||||
{
|
||||
debug_file = file;
|
||||
@@ -519,18 +531,18 @@ void debug_on_enter_function( FRAME * frame, OBJECT * name, OBJECT * file, int l
|
||||
debug_frame = frame;
|
||||
debug_on_breakpoint( breakpoint_id );
|
||||
}
|
||||
else if ( debug_state == DEBUG_NEXT || debug_state == DEBUG_FINISH )
|
||||
{
|
||||
++debug_depth;
|
||||
}
|
||||
}
|
||||
|

void debug_on_exit_function( OBJECT * name )
{
assert( debug_is_debugging() );
if ( debug_state == DEBUG_NEXT || debug_state == DEBUG_FINISH )
--debug_depth;
if ( debug_depth < 0 )
{
--debug_depth;
/* The current location is no longer valid
after we return from the containing function. */
debug_file = NULL;
debug_line = 0;
}
}

@@ -544,11 +556,13 @@ static int child_pid;
static void debug_child_continue( int argc, const char * * argv )
{
debug_state = DEBUG_RUN;
debug_depth = 0;
}

static void debug_child_step( int argc, const char * * argv )
{
debug_state = DEBUG_STEP;
debug_depth = 0;
}

static void debug_child_next( int argc, const char * * argv )
@@ -560,7 +574,7 @@ static void debug_child_next( int argc, const char * * argv )
static void debug_child_finish( int argc, const char * * argv )
{
debug_state = DEBUG_FINISH;
debug_depth = 1;
debug_depth = 0;
}

static void debug_child_kill( int argc, const char * * argv )
|
||||
PROCESS_INFORMATION pi = { NULL, NULL, 0, 0 };
|
||||
STARTUPINFO si = { sizeof( STARTUPINFO ), 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0 };
|
||||
assert( DEBUG_STATE == DEBUG_NO_CHILD );
|
||||
assert( debug_state == DEBUG_NO_CHILD );
|
||||
if ( ! CreatePipe( &pipe1[ 0 ], &pipe1[ 1 ], &sa, 0 ) )
|
||||
{
|
||||
printf("internal error\n");
|
||||
@@ -1189,7 +1203,7 @@ static void debug_start_child( int argc, const char * * argv )
|
||||
int read_fd;
|
||||
int pid;
|
||||
int i;
|
||||
assert( DEBUG_STATE == DEBUG_NO_CHILD );
|
||||
assert( debug_state == DEBUG_NO_CHILD );
|
||||
pipe(pipe1);
|
||||
pipe(pipe2);
|
||||
pid = fork();
|
||||
|
||||
@@ -1282,7 +1282,9 @@ static void string_new_from_argv( string * result, char const * const * argv )
|
||||
while ( *argv )
|
||||
{
|
||||
string_push_back( result, ' ' );
|
||||
string_push_back( result, '"' );
|
||||
string_append( result, *(argv++) );
|
||||
string_push_back( result, '"' );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -88,6 +88,7 @@ file_archive_info_t * file_archive_info( OBJECT * const path, int * found )
|
||||
|
||||
if ( !*found )
|
||||
{
|
||||
archive->name = path_key;
|
||||
archive->file = 0;
|
||||
archive->members = FL0;
|
||||
}
|
||||
@@ -292,6 +293,7 @@ file_info_t * file_query( OBJECT * const path )
|
||||
return ff;
|
||||
}
|
||||
|
||||
#ifndef OS_NT
|
||||
|
||||
/*
|
||||
* file_query_posix_() - query information about a path using POSIX stat()
|
||||
@@ -329,10 +331,45 @@ void file_query_posix_( file_info_t * const info )
|
||||
info->is_file = statbuf.st_mode & S_IFREG ? 1 : 0;
|
||||
info->is_dir = statbuf.st_mode & S_IFDIR ? 1 : 0;
|
||||
info->exists = 1;
|
||||
#if defined(_POSIX_VERSION) && _POSIX_VERSION >= 200809
|
||||
#if defined(OS_MACOSX)
|
||||
timestamp_init( &info->time, statbuf.st_mtimespec.tv_sec, statbuf.st_mtimespec.tv_nsec );
|
||||
#else
|
||||
timestamp_init( &info->time, statbuf.st_mtim.tv_sec, statbuf.st_mtim.tv_nsec );
|
||||
#endif
|
||||
#else
|
||||
timestamp_init( &info->time, statbuf.st_mtime, 0 );
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* file_supported_fmt_resolution() - file modification timestamp resolution
|
||||
*
|
||||
* Returns the minimum file modification timestamp resolution supported by this
|
||||
* Boost Jam implementation. File modification timestamp changes of less than
|
||||
* the returned value might not be recognized.
|
||||
*
|
||||
* Does not take into consideration any OS or file system related restrictions.
|
||||
*
|
||||
* Return value 0 indicates that any value supported by the OS is also supported
|
||||
* here.
|
||||
*/
|
||||
|
||||
void file_supported_fmt_resolution( timestamp * const t )
|
||||
{
|
||||
#if defined(_POSIX_VERSION) && _POSIX_VERSION >= 200809
|
||||
timestamp_init( t, 0, 1 );
|
||||
#else
|
||||
/* The current implementation does not support file modification timestamp
|
||||
* resolution of less than one second.
|
||||
*/
|
||||
timestamp_init( t, 1, 0 );
|
||||
#endif
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
/*
|
||||
* file_remove_atexit() - schedule a path to be removed on program exit
|
||||
|
||||
@@ -53,6 +53,7 @@ typedef file_info_t * * FILELISTITER; /* also &FILEITEM equivalent */
|
||||
|
||||
typedef struct file_archive_info_t
|
||||
{
|
||||
OBJECT * name;
|
||||
file_info_t * file;
|
||||
FILELIST * members;
|
||||
} file_archive_info_t;
|
||||
|
||||
@@ -201,27 +201,6 @@ void file_query_( file_info_t * const info )
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* file_supported_fmt_resolution() - file modification timestamp resolution
|
||||
*
|
||||
* Returns the minimum file modification timestamp resolution supported by this
|
||||
* Boost Jam implementation. File modification timestamp changes of less than
|
||||
* the returned value might not be recognized.
|
||||
*
|
||||
* Does not take into consideration any OS or file system related restrictions.
|
||||
*
|
||||
* Return value 0 indicates that any value supported by the OS is also supported
|
||||
* here.
|
||||
*/
|
||||
|
||||
void file_supported_fmt_resolution( timestamp * const t )
|
||||
{
|
||||
/* The current implementation does not support file modification timestamp
|
||||
* resolution of less than one second.
|
||||
*/
|
||||
timestamp_init( t, 1, 0 );
|
||||
}
|
||||
|
||||
int file_collect_archive_content_( file_archive_info_t * const archive );
|
||||
|
||||
/*
|
||||
|
||||
@@ -162,27 +162,6 @@ void file_query_( file_info_t * const info )
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* file_supported_fmt_resolution() - file modification timestamp resolution
|
||||
*
|
||||
* Returns the minimum file modification timestamp resolution supported by this
|
||||
* Boost Jam implementation. File modification timestamp changes of less than
|
||||
* the returned value might not be recognized.
|
||||
*
|
||||
* Does not take into consideration any OS or file system related restrictions.
|
||||
*
|
||||
* Return value 0 indicates that any value supported by the OS is also supported
|
||||
* here.
|
||||
*/
|
||||
|
||||
void file_supported_fmt_resolution( timestamp * const t )
|
||||
{
|
||||
/* The current implementation does not support file modification timestamp
|
||||
* resolution of less than one second.
|
||||
*/
|
||||
timestamp_init( t, 1, 0 );
|
||||
}
|
||||
|
||||
/*------------------------------------------------------------------------------
|
||||
* VMS-specific processing:
|
||||
*
|
||||
|
||||
@@ -533,7 +533,20 @@ static LIST * function_call_member_rule( JAM_FUNCTION * function, FRAME * frame,
|
||||
if ( module->class_module )
|
||||
{
|
||||
rule = bindrule( rulename, module );
|
||||
real_rulename = object_copy( function_rulename( rule->procedure ) );
|
||||
if ( rule->procedure )
|
||||
{
|
||||
real_rulename = object_copy( function_rulename( rule->procedure ) );
|
||||
}
|
||||
else
|
||||
{
|
||||
string buf[ 1 ];
|
||||
string_new( buf );
|
||||
string_append( buf, object_str( module->name ) );
|
||||
string_push_back( buf, '.' );
|
||||
string_append( buf, object_str( rulename ) );
|
||||
real_rulename = object_new( buf->value );
|
||||
string_free( buf );
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -2434,12 +2447,17 @@ static void compile_append_chain( PARSE * parse, compiler * c )
|
||||
}
|
||||
}
|
||||
|
||||
static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
static void compile_emit_debug(compiler * c, int line)
|
||||
{
|
||||
#ifdef JAM_DEBUGGER
|
||||
if ( debug_is_debugging() )
|
||||
compile_emit( c, INSTR_DEBUG_LINE, parse->line );
|
||||
compile_emit( c, INSTR_DEBUG_LINE, line );
|
||||
#endif
|
||||
}
|
||||
|
||||
static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
{
|
||||
compile_emit_debug(c, parse->line);
|
||||
if ( parse->type == PARSE_APPEND )
|
||||
{
|
||||
compile_append_chain( parse, c );
|
||||
@@ -2495,6 +2513,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
compile_emit( c, INSTR_FOR_INIT, 0 );
|
||||
compile_set_label( c, top );
|
||||
compile_emit_branch( c, INSTR_FOR_LOOP, end );
|
||||
compile_emit_debug( c, parse->line );
|
||||
compile_emit( c, INSTR_SET, var );
|
||||
|
||||
compile_push_break_scope( c, end );
|
||||
@@ -2649,6 +2668,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
group->elems, 0 ) )->s );
|
||||
var_parse_group_free( group );
|
||||
compile_parse( parse->right, c, RESULT_STACK );
|
||||
compile_emit_debug(c, parse->line);
|
||||
compile_emit( c, INSTR_PUSH_LOCAL, name );
|
||||
compile_push_cleanup( c, INSTR_POP_LOCAL, name );
|
||||
compile_parse( parse->third, c, nested_result );
|
||||
@@ -2660,6 +2680,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
var_parse_group_compile( group, c );
|
||||
var_parse_group_free( group );
|
||||
compile_parse( parse->right, c, RESULT_STACK );
|
||||
compile_emit_debug(c, parse->line);
|
||||
compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 );
|
||||
compile_push_cleanup( c, INSTR_POP_LOCAL_GROUP, 0 );
|
||||
compile_parse( parse->third, c, nested_result );
|
||||
@@ -2671,6 +2692,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
{
|
||||
compile_parse( parse->left, c, RESULT_STACK );
|
||||
compile_parse( parse->right, c, RESULT_STACK );
|
||||
compile_emit_debug(c, parse->line);
|
||||
compile_emit( c, INSTR_PUSH_LOCAL_GROUP, 0 );
|
||||
compile_push_cleanup( c, INSTR_POP_LOCAL_GROUP, 0 );
|
||||
compile_parse( parse->third, c, nested_result );
|
||||
@@ -2820,6 +2842,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
group->elems, 0 ) )->s );
|
||||
var_parse_group_free( group );
|
||||
compile_parse( parse->right, c, RESULT_STACK );
|
||||
compile_emit_debug(c, parse->line);
|
||||
if ( result_location != RESULT_NONE )
|
||||
{
|
||||
compile_emit( c, INSTR_SET_RESULT, 1 );
|
||||
@@ -2831,6 +2854,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
var_parse_group_compile( group, c );
|
||||
var_parse_group_free( group );
|
||||
compile_parse( parse->right, c, RESULT_STACK );
|
||||
compile_emit_debug(c, parse->line);
|
||||
if ( result_location != RESULT_NONE )
|
||||
{
|
||||
compile_emit( c, INSTR_SET_RESULT, 1 );
|
||||
@@ -2842,6 +2866,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
{
|
||||
compile_parse( parse->left, c, RESULT_STACK );
|
||||
compile_parse( parse->right, c, RESULT_STACK );
|
||||
compile_emit_debug(c, parse->line);
|
||||
if ( result_location != RESULT_NONE )
|
||||
{
|
||||
compile_emit( c, INSTR_SET_RESULT, 1 );
|
||||
@@ -2875,6 +2900,7 @@ static void compile_parse( PARSE * parse, compiler * c, int result_location )
|
||||
compile_parse( parse->third, c, RESULT_STACK );
|
||||
compile_parse( parse->right, c, RESULT_STACK );
|
||||
|
||||
compile_emit_debug(c, parse->line);
|
||||
switch ( parse->num )
|
||||
{
|
||||
case ASSIGN_APPEND: compile_emit( c, INSTR_APPEND_ON, 0 ); break;
|
||||
|
||||
@@ -269,6 +269,7 @@ int main( int argc, char * * argv, char * * arg_environ )
|
||||
#endif
|
||||
|
||||
cwd_init();
|
||||
constants_init();
|
||||
|
||||
#ifdef JAM_DEBUGGER
|
||||
|
||||
@@ -449,8 +450,6 @@ int main( int argc, char * * argv, char * * arg_environ )
|
||||
/* ++globs.noexec; */
|
||||
}
|
||||
|
||||
constants_init();
|
||||
|
||||
{
|
||||
PROFILE_ENTER( MAIN );
|
||||
|
||||
|
@@ -572,11 +572,42 @@ static void make1c( state const * const pState )

/* Tally success/failure for those we tried to update. */
if ( t->progress == T_MAKE_RUNNING )
{
/* Invert OK/FAIL target status when FAIL_EXPECTED has been applied. */
if ( t->flags & T_FLAG_FAIL_EXPECTED && !globs.noexec )
{
switch ( t->status )
{
case EXEC_CMD_FAIL: t->status = EXEC_CMD_OK; break;
case EXEC_CMD_OK: t->status = EXEC_CMD_FAIL; break;
}

/* Printing failure has to be delayed until the last
* action is completed for FAIL_EXPECTED targets.
* Do it here.
*/
if ( t->status == EXEC_CMD_FAIL )
{
out_printf( "...failed %s ", object_str( t->actions->action->rule->name ) );
out_printf( "%s", object_str( t->boundname ) );
out_printf( "...\n" );
}

/* Handle -q */
if ( t->status == EXEC_CMD_FAIL && globs.quitquick )
++quit;

/* Delete the target on failure. */
if ( !( t->flags & ( T_FLAG_PRECIOUS | T_FLAG_NOTFILE ) ) &&
!unlink( object_str( t->boundname ) ) )
out_printf( "...removing %s\n", object_str( t->boundname ) );
}
switch ( t->status )
{
case EXEC_CMD_OK: ++counts->made; break;
case EXEC_CMD_FAIL: ++counts->failed; break;
}
}

/* Tell parents their dependency has been built. */
{
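This moves the FAIL_EXPECTED handling to the point where the whole target is tallied, so the OK/FAIL inversion and the delayed failure message happen once per target rather than once per action. Reduced to its core, the inversion is the small status flip sketched below (a hypothetical helper, not the engine's code):

#include <stdio.h>

/* Hypothetical sketch: a target marked "fail expected" counts as
 * succeeded when its actions fail and as failed when they
 * unexpectedly succeed (unless commands are not actually run). */
enum cmd_status { CMD_OK, CMD_FAIL };

static enum cmd_status apply_fail_expected( enum cmd_status status,
    int fail_expected, int noexec )
{
    if ( fail_expected && !noexec )
        return status == CMD_FAIL ? CMD_OK : CMD_FAIL;
    return status;
}

int main( void )
{
    /* A compile-fail test: the action failed, the target counts as made. */
    printf( "%d\n", apply_fail_expected( CMD_FAIL, 1, 0 ) == CMD_OK );
    return 0;
}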
@@ -833,16 +864,6 @@ static void make1c_closure
|
||||
/* Store the target's status. */
|
||||
t->status = status_orig;
|
||||
|
||||
/* Invert OK/FAIL target status when FAIL_EXPECTED has been applied. */
|
||||
if ( t->flags & T_FLAG_FAIL_EXPECTED && !globs.noexec )
|
||||
{
|
||||
switch ( t->status )
|
||||
{
|
||||
case EXEC_CMD_FAIL: t->status = EXEC_CMD_OK; break;
|
||||
case EXEC_CMD_OK: t->status = EXEC_CMD_FAIL; break;
|
||||
}
|
||||
}
|
||||
|
||||
/* Ignore failures for actions marked as 'ignore'. */
|
||||
if ( t->status == EXEC_CMD_FAIL && cmd->rule->actions->flags &
|
||||
RULE_IGNORE )
|
||||
@@ -873,7 +894,8 @@ static void make1c_closure
|
||||
}
|
||||
|
||||
/* Print command text on failure. */
|
||||
if ( t->status == EXEC_CMD_FAIL && DEBUG_MAKE )
|
||||
if ( t->status == EXEC_CMD_FAIL && DEBUG_MAKE &&
|
||||
! ( t->flags & T_FLAG_FAIL_EXPECTED ) )
|
||||
{
|
||||
if ( !DEBUG_EXEC )
|
||||
out_printf( "%s\n", cmd->buf->value );
|
||||
@@ -891,7 +913,8 @@ static void make1c_closure
|
||||
++intr;
|
||||
++quit;
|
||||
}
|
||||
if ( t->status == EXEC_CMD_FAIL && globs.quitquick )
|
||||
if ( t->status == EXEC_CMD_FAIL && globs.quitquick &&
|
||||
! ( t->flags & T_FLAG_FAIL_EXPECTED ) )
|
||||
++quit;
|
||||
|
||||
/* If the command was not successful remove all of its targets not marked as
|
||||
|
@@ -132,6 +132,16 @@ void timestamp_current( timestamp * const t )
FILETIME ft;
GetSystemTimeAsFileTime( &ft );
timestamp_from_filetime( t, &ft );
#elif defined(_POSIX_TIMERS) && defined(CLOCK_REALTIME) && \
(!defined(__GLIBC__) || (__GLIBC__ > 2) || (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 17))
/* Some older versions of XCode define _POSIX_TIMERS, but don't actually
* have clock_gettime. Check CLOCK_REALTIME as well. Prior to glibc 2.17,
* clock_gettime requires -lrt. This is a non-critical feature, so
* we just disable it to keep bootstrapping simple.
*/
struct timespec ts;
clock_gettime( CLOCK_REALTIME, &ts );
timestamp_init( t, ts.tv_sec, ts.tv_nsec );
#else /* OS_NT */
timestamp_init( t, time( 0 ), 0 );
#endif /* OS_NT */

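The hunk above prefers clock_gettime( CLOCK_REALTIME ) for sub-second timestamps, and only when the platform checks pass, falling back to time() otherwise. A stripped-down sketch of the same pattern, with a much simpler guard than the one used above:

#include <stdio.h>
#include <time.h>

/* Fill sec/nsec with the current wall-clock time.  Uses
 * clock_gettime() where CLOCK_REALTIME is available and falls back
 * to one-second resolution via time() otherwise.  (The real guard
 * above also checks _POSIX_TIMERS and the glibc version.) */
static void current_time( long * sec, long * nsec )
{
#if defined(CLOCK_REALTIME)
    struct timespec ts;
    clock_gettime( CLOCK_REALTIME, &ts );
    *sec = (long)ts.tv_sec;
    *nsec = (long)ts.tv_nsec;
#else
    *sec = (long)time( NULL );
    *nsec = 0;
#endif
}

int main( void )
{
    long s, ns;
    current_time( &s, &ns );
    printf( "%ld.%09ld\n", s, ns );
    return 0;
}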
@@ -4,8 +4,8 @@
|
||||
|
||||
import generators ;
|
||||
import feature ;
|
||||
import toolset : flags ;
|
||||
import type ;
|
||||
import property ;
|
||||
|
||||
feature.feature bison.prefix : : free ;
|
||||
type.register Y : y ;
|
||||
@@ -17,16 +17,10 @@ rule init ( )
|
||||
{
|
||||
}
|
||||
|
||||
rule bison ( dst dst_header : src : properties * )
|
||||
{
|
||||
local r = [ property.select bison.prefix : $(properties) ] ;
|
||||
if $(r)
|
||||
{
|
||||
PREFIX_OPT on $(<) = -p $(r:G=) ;
|
||||
}
|
||||
}
|
||||
flags bison.bison PREFIX <bison.prefix> ;
|
||||
_ = " " ;
|
||||
|
||||
actions bison
|
||||
{
|
||||
bison $(PREFIX_OPT) -d -o $(<[1]) $(>)
|
||||
bison -p$(_)$(PREFIX) -d -o $(<[1]) $(>)
|
||||
}
|
||||
|
||||
@@ -94,7 +94,7 @@ rule init (
|
||||
find-tools $(docbook-xsl-dir) : $(docbook-dtd-dir) : $(boostbook-dir) ;
|
||||
|
||||
# Register generators only if we were called via "using boostbook ;"
|
||||
local reg-gen = generators.register-standard ;
|
||||
local reg-gen = generators.register-xslt ;
|
||||
$(reg-gen) boostbook.dtdxml-to-boostbook : DTDXML : XML ;
|
||||
$(reg-gen) boostbook.boostbook-to-docbook : XML : DOCBOOK ;
|
||||
$(reg-gen) boostbook.boostbook-to-tests : XML : TESTS ;
|
||||
|
||||
@@ -14,11 +14,11 @@
|
||||
import project ;
|
||||
import ac ;
|
||||
import errors ;
|
||||
import feature ;
|
||||
import "class" : new ;
|
||||
import targets ;
|
||||
import path ;
|
||||
import modules ;
|
||||
import errors ;
|
||||
import indirect ;
|
||||
import make ;
|
||||
import os ;
|
||||
@@ -113,18 +113,12 @@ rule init (
|
||||
project bzip2 ;
|
||||
}
|
||||
|
||||
local library-path = [ property.select <search> : $(options) ] ;
|
||||
library-path = $(library-path:G=) ;
|
||||
local include-path = [ property.select <include> : $(options) ] ;
|
||||
include-path = $(include-path:G=) ;
|
||||
local source-path = [ property.select <source> : $(options) ] ;
|
||||
source-path = $(source-path:G=) ;
|
||||
local library-name = [ property.select <name> : $(options) ] ;
|
||||
library-name = $(library-name:G=) ;
|
||||
local tag = [ property.select <tag> : $(options) ] ;
|
||||
tag = $(tag:G=) ;
|
||||
local build-name = [ property.select <build-name> : $(options) ] ;
|
||||
build-name = $(build-name:G=) ;
|
||||
local library-path = [ feature.get-values <search> : $(options) ] ;
|
||||
local include-path = [ feature.get-values <include> : $(options) ] ;
|
||||
local source-path = [ feature.get-values <source> : $(options) ] ;
|
||||
local library-name = [ feature.get-values <name> : $(options) ] ;
|
||||
local tag = [ feature.get-values <tag> : $(options) ] ;
|
||||
local build-name = [ feature.get-values <build-name> : $(options) ] ;
|
||||
|
||||
if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
|
||||
{
|
||||
@@ -171,7 +165,7 @@ rule init (
|
||||
build-name ?= bz2 ;
|
||||
library-id = [ CALC $(library-id) + 1 ] ;
|
||||
tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
|
||||
if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
|
||||
if $(tag)
|
||||
{
|
||||
tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
|
||||
}
|
||||
|
||||
@@ -33,8 +33,7 @@ generators.register-c-compiler clang-darwin.compile.mm : OBJECTIVE_CPP : OBJ : <
|
||||
|
||||
toolset.inherit-rules clang-darwin : gcc ;
|
||||
toolset.inherit-flags clang-darwin : gcc
|
||||
: <inlining>off <inlining>on <inlining>full <optimization>space
|
||||
<warnings>off <warnings>all <warnings>on
|
||||
: <inlining>full
|
||||
<architecture>x86/<address-model>32
|
||||
<architecture>x86/<address-model>64
|
||||
;
|
||||
@@ -65,8 +64,7 @@ rule init ( version ? : command * : options * )
|
||||
: version $(version) ] ;
|
||||
|
||||
common.handle-options clang-darwin : $(condition) : $(command) : $(options) ;
|
||||
|
||||
gcc.init-link-flags clang darwin $(condition) ;
|
||||
clang.init-cxxstd-flags clang-darwin : $(condition) : $(version) ;
|
||||
|
||||
# - Ranlib.
|
||||
local ranlib = [ feature.get-values <ranlib> : $(options) ] ;
|
||||
@@ -79,42 +77,27 @@ rule init ( version ? : command * : options * )
|
||||
|
||||
SPACE = " " ;
|
||||
|
||||
toolset.flags clang-darwin.compile OPTIONS <cflags> ;
|
||||
toolset.flags clang-darwin.compile.c++ OPTIONS <cxxflags> ;
|
||||
toolset.flags clang-darwin.compile.m OPTIONS <mflags> ;
|
||||
toolset.flags clang-darwin.compile.mm OPTIONS <mflags> ;
|
||||
toolset.flags clang-darwin.compile.mm OPTIONS <mmflags> ;
|
||||
# toolset.flags clang-darwin.compile INCLUDES <include> ;
|
||||
|
||||
# Declare flags and action for compilation.
|
||||
toolset.flags clang-darwin.compile OPTIONS <optimization>off : -O0 ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <optimization>speed : -O3 ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <optimization>space : -Os ;
|
||||
|
||||
# For clang, 'on' and 'full' are identical
|
||||
toolset.flags clang-darwin.compile OPTIONS <inlining>off : -fno-inline ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <inlining>on : -Wno-inline ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <inlining>full : -Wno-inline ;
|
||||
|
||||
toolset.flags clang-darwin.compile OPTIONS <warnings>off : -w ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <warnings>on : -Wall ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <warnings>all : -Wall -pedantic ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <warnings-as-errors>on : -Werror ;
|
||||
|
||||
toolset.flags clang-darwin.compile OPTIONS <debug-symbols>on : -g ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <profiling>on : -pg ;
|
||||
toolset.flags clang-darwin.compile OPTIONS <rtti>off : -fno-rtti ;
|
||||
|
||||
# SJW 12/2017: Support for <flags> is widely inconsistant.
|
||||
# shouldn't this be handled by the common gcc?
|
||||
toolset.flags clang-darwin.compile OPTIONS <flags> ;
|
||||
|
||||
actions compile.c
|
||||
{
|
||||
"$(CONFIG_COMMAND)" -x c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
|
||||
"$(CONFIG_COMMAND)" -x c $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
|
||||
}
|
||||
|
||||
actions compile.c++
|
||||
{
|
||||
"$(CONFIG_COMMAND)" -x c++ $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
|
||||
"$(CONFIG_COMMAND)" -x c++ $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
|
||||
}
|
||||
|
||||
actions compile.m
|
||||
|
||||
@@ -36,9 +36,9 @@ type.set-generated-target-suffix PCH
|
||||
|
||||
toolset.inherit-rules clang-linux : gcc ;
|
||||
toolset.inherit-flags clang-linux : gcc
|
||||
: <inlining>off <inlining>on <inlining>full
|
||||
<optimization>space <optimization>speed
|
||||
<warnings>off <warnings>all <warnings>on ;
|
||||
: <inlining>full
|
||||
<threading>multi/<target-os>windows
|
||||
;
|
||||
|
||||
if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ] {
|
||||
.debug-configuration = true ;
|
||||
@@ -60,8 +60,7 @@ rule init ( version ? : command * : options * ) {
|
||||
: version $(version) ] ;
|
||||
|
||||
common.handle-options clang-linux : $(condition) : $(command) : $(options) ;
|
||||
|
||||
gcc.init-link-flags clang linux $(condition) ;
|
||||
clang.init-cxxstd-flags clang-linux : $(condition) : $(version) ;
|
||||
|
||||
# - Ranlib.
|
||||
local ranlib = [ feature.get-values <ranlib> : $(options) ] ;
|
||||
@@ -75,27 +74,12 @@ rule init ( version ? : command * : options * ) {
|
||||
###############################################################################
|
||||
# Flags
|
||||
|
||||
toolset.flags clang-linux.compile OPTIONS <cflags> ;
|
||||
toolset.flags clang-linux.compile.c++ OPTIONS <cxxflags> ;
|
||||
|
||||
toolset.flags clang-linux.compile OPTIONS <optimization>off : ;
|
||||
toolset.flags clang-linux.compile OPTIONS <optimization>speed : -O3 ;
|
||||
toolset.flags clang-linux.compile OPTIONS <optimization>space : -Os ;
|
||||
|
||||
# note: clang silently ignores some of these inlining options
|
||||
toolset.flags clang-linux.compile OPTIONS <inlining>off : -fno-inline ;
|
||||
# For clang, 'on' and 'full' are identical.
|
||||
toolset.flags clang-linux.compile OPTIONS <inlining>on : -Wno-inline ;
|
||||
toolset.flags clang-linux.compile OPTIONS <inlining>full : -Wno-inline ;
|
||||
|
||||
toolset.flags clang-linux.compile OPTIONS <warnings>off : -w ;
|
||||
toolset.flags clang-linux.compile OPTIONS <warnings>on : -Wall ;
|
||||
toolset.flags clang-linux.compile OPTIONS <warnings>all : -Wall -pedantic ;
|
||||
toolset.flags clang-linux.compile OPTIONS <warnings-as-errors>on : -Werror ;
|
||||
|
||||
toolset.flags clang-linux.compile OPTIONS <debug-symbols>on : -g ;
|
||||
toolset.flags clang-linux.compile OPTIONS <profiling>on : -pg ;
|
||||
toolset.flags clang-linux.compile OPTIONS <rtti>off : -fno-rtti ;
|
||||
toolset.flags clang-linux.compile OPTIONS <threading>multi/<target-os>windows : -pthread ;
|
||||
toolset.flags clang-linux.link OPTIONS <threading>multi/<target-os>windows : -pthread ;
|
||||
|
||||
###############################################################################
|
||||
# C and C++ compilation
|
||||
|
||||
@@ -69,8 +69,6 @@ rule init ( version ? : command * : options * )
|
||||
|
||||
common.handle-options clang-vxworks : $(condition) : $(command) : $(options) ;
|
||||
|
||||
gcc.init-link-flags clang-vxworks vxworks $(condition) ;
|
||||
|
||||
toolset.flags clang-vxworks.link .LD : $(linker) ;
|
||||
}
|
||||
|
||||
|
||||
@@ -8,6 +8,9 @@
|
||||
import feature ;
|
||||
import os ;
|
||||
import toolset ;
|
||||
import sequence ;
|
||||
import regex ;
|
||||
import set ;
|
||||
|
||||
feature.extend toolset : clang ;
|
||||
feature.subfeature toolset clang : platform : : propagated link-incompatible ;
|
||||
@@ -25,3 +28,33 @@ rule init ( * : * )
|
||||
$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
|
||||
}
|
||||
}
|
||||
|
||||

local rule cxxstd-flags ( toolset : condition * : options * )
{
toolset.flags $(toolset).compile.c++ OPTIONS $(condition) : $(options) : unchecked ;
toolset.flags $(toolset).link OPTIONS $(condition) : $(options) : unchecked ;
}

local rule version-ge ( lhs : rhs )
{
lhs = [ regex.split $(lhs) "[.]" ] ;
rhs = [ regex.split $(rhs) "[.]" ] ;
return [ sequence.compare $(rhs) : $(lhs) : numbers.less ] ;
}

# Version specific flags
rule init-cxxstd-flags ( toolset : condition * : version )
{
local cxxstd = [ feature.values <cxxstd> ] ;
local dialects = [ feature.values <cxxstd-dialect> ] ;
dialects = [ set.difference $(dialects) : gnu iso ] ;
local std ;
if [ version-ge $(version) : 3.5 ] { std = 1z ; }
else if [ version-ge $(version) : 3.4 ] { std = 14 ; }
else if [ version-ge $(version) : 3.3 ] { std = 11 ; }
else { std = 03 ; }
cxxstd-flags $(toolset) : $(condition)/<cxxstd>latest/<cxxstd-dialect>iso : -std=c++$(std) ;
cxxstd-flags $(toolset) : $(condition)/<cxxstd>latest/<cxxstd-dialect>gnu : -std=gnu++$(std) ;
cxxstd-flags $(toolset) : $(condition)/<cxxstd>latest/<cxxstd-dialect>$(dialects) : -std=c++$(std) ;
}

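The version-ge rule above compares dot-separated version strings component by component and numerically, so that for example 3.10 ranks above 3.9. A rough standalone equivalent of that comparison in C, using a hypothetical helper name:

#include <stdio.h>
#include <stdlib.h>

/* Compare dot-separated versions numerically, component by
 * component.  Missing components count as 0, so "3.4" >= "3.4.0".
 * Assumes well-formed numeric version strings. */
static int version_ge( const char * lhs, const char * rhs )
{
    while ( *lhs || *rhs )
    {
        char * end;
        long a = strtol( lhs, &end, 10 ); lhs = end;
        long b = strtol( rhs, &end, 10 ); rhs = end;
        if ( a != b ) return a > b;
        if ( *lhs == '.' ) ++lhs;
        if ( *rhs == '.' ) ++rhs;
    }
    return 1;  /* equal */
}

int main( void )
{
    printf( "%d\n", version_ge( "3.10", "3.9" ) );  /* 1 */
    printf( "%d\n", version_ge( "3.3", "3.5" ) );   /* 0 */
    return 0;
}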
@@ -925,9 +925,9 @@ local rule toolset-tag ( name : type ? : property-set )
|
||||
case edg* : tag += edg ;
|
||||
case gcc* :
|
||||
{
|
||||
switch [ $(property-set).get <toolset-gcc:flavor> ]
|
||||
switch [ $(property-set).get <target-os> ]
|
||||
{
|
||||
case *mingw* : tag += mgw ;
|
||||
case *windows* : tag += mgw ;
|
||||
case * : tag += gcc ;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -156,9 +156,6 @@ rule init ( version ? : command * : options * : requirement * )
|
||||
flags darwin.compile OPTIONS $(condition)/<inlining>full : -Wno-inline ;
|
||||
}
|
||||
|
||||
# - Set the link flags common with the GCC toolset.
|
||||
gcc.init-link-flags darwin darwin $(condition) ;
|
||||
|
||||
# - The symbol strip program.
|
||||
local strip ;
|
||||
if <striper> in $(options)
|
||||
|
||||
@@ -105,9 +105,9 @@ rule init ( name ? )
|
||||
: DOXYFILE : DOXYGEN_XML_MULTIFILE ;
|
||||
generators.register-standard doxygen.xml-dir-to-boostbook
|
||||
: DOXYGEN_XML_MULTIFILE : BOOSTBOOK : <doxygen.processor>doxproc ;
|
||||
generators.register-standard doxygen.xml-to-boostbook
|
||||
generators.register-xslt doxygen.xml-to-boostbook
|
||||
: DOXYGEN_XML : BOOSTBOOK : <doxygen.processor>xsltproc ;
|
||||
generators.register-standard doxygen.collect
|
||||
generators.register-xslt doxygen.collect
|
||||
: DOXYGEN_XML_MULTIFILE : DOXYGEN_XML ;
|
||||
generators.register-standard doxygen.run
|
||||
: DOXYFILE : DOXYGEN_HTML_MULTIFILE ;
|
||||
@@ -274,6 +274,7 @@ rule translate-path ( path )
|
||||
}
|
||||
}
|
||||
|
||||
toolset.uses-features doxygen.headers-to-doxyfile : "<doxygen:param>" ;
|
||||
|
||||
# Generates a doxygen configuration file (doxyfile) given a set of C++ sources
|
||||
# and a property list that may contain <doxygen:param> features.
|
||||
@@ -321,6 +322,7 @@ rule headers-to-doxyfile ( target : sources * : properties * )
|
||||
print.text $(text) : true ;
|
||||
}
|
||||
|
||||
toolset.uses-features doxygen.run : <doxygen.rmdir> "<doxygen:param>" ;
|
||||
|
||||
# Run Doxygen. See doxygen-action for a description of the strange properties of
|
||||
# this rule.
|
||||
@@ -406,6 +408,7 @@ rule collect ( target : source : properties * )
|
||||
: <xsl:param>doxygen.xml.path=$(native-path) ;
|
||||
}
|
||||
|
||||
toolset.uses-features doxygen.xml-to-boostbook : <prefix> <reftitle> ;
|
||||
|
||||
# Translate Doxygen XML into BoostBook.
|
||||
#
|
||||
|
||||
src/tools/features/relevant-feature.jam (new file)
@@ -0,0 +1,10 @@
# Copyright 2017 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

# Identifies relevant features.

import feature ;

feature.feature relevant : : incidental free ;

@@ -25,5 +25,5 @@ rule detect ( properties * )
local ps = [ property-set.create $(properties) ] ;
local api = [ $(ps).get <threadapi> ] ;
if ! $(api) { api = [ get-default $(ps) ] ; }
return <threadapi>$(api) ;
return <threadapi>$(api) <relevant>threadapi:<relevant>target-os ;
}

@@ -100,6 +100,7 @@ import property ;
|
||||
import property-set ;
|
||||
import rc ;
|
||||
import regex ;
|
||||
import sequence ;
|
||||
import set ;
|
||||
import toolset ;
|
||||
import type ;
|
||||
@@ -115,7 +116,6 @@ if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
|
||||
|
||||
|
||||
feature.extend toolset : gcc ;
|
||||
# feature.subfeature toolset gcc : flavor : : optional ;
|
||||
|
||||
toolset.inherit-generators gcc : unix : unix.link unix.link.dll ;
|
||||
toolset.inherit-flags gcc : unix ;
|
||||
@@ -179,8 +179,8 @@ rule init ( version ? : command * : options * : requirement * )
|
||||
{
|
||||
local tool-command-string = \"$(tool-command)\" ;
|
||||
tool-command-string = $(tool-command-string:J=" ") ;
|
||||
local tool-version = [ MATCH "^([0-9.]+)" :
|
||||
[ SHELL "$(tool-command-string) -dumpversion" ] ] ;
|
||||
local tool-version = [ dump-full-version
|
||||
$(tool-command-string) ] ;
|
||||
if $(tool-version) != $(version)
|
||||
{
|
||||
# Permit a match betwen a two-digit version specified by the
|
||||
@@ -262,26 +262,20 @@ rule init ( version ? : command * : options * : requirement * )
|
||||
{
|
||||
local machine = [ MATCH "^([^ ]+)" :
|
||||
[ SHELL "$(command-string) -dumpmachine" ] ] ;
|
||||
version ?= [ MATCH "^([0-9.]+)" :
|
||||
[ SHELL "$(command-string) -dumpversion" ] ] ;
|
||||
version ?= [ dump-full-version $(command-string) ] ;
|
||||
switch $(machine:L)
|
||||
{
|
||||
case *mingw* : flavor ?= mingw ;
|
||||
case *cygwin* : flavor ?= cygwin ;
|
||||
}
|
||||
}
|
||||
|
||||
local condition ;
|
||||
if $(flavor)
|
||||
{
|
||||
condition = flavor $(flavor) ;
|
||||
}
|
||||
condition = [ common.check-init-parameters gcc $(requirement) : version $(version)
|
||||
: $(condition) ] ;
|
||||
|
||||
common.handle-options gcc : $(condition) : $(command) : $(options) ;
|
||||
|
||||
init-link-flags gcc "" $(condition) ;
|
||||
|
||||
# If gcc is installed in a non-standard location, we would need to add
|
||||
# LD_LIBRARY_PATH when running programs created with it (for unit-test/run
|
||||
# rules).
|
||||
@@ -346,6 +340,8 @@ rule init ( version ? : command * : options * : requirement * )
|
||||
rc.configure $(rc) : $(condition) : <rc-type>$(rc-type) ;
|
||||
|
||||
toolset.flags gcc VERSION $(condition) : [ regex.split $(version) "[.]" ] ;
|
||||
|
||||
init-cxxstd-flags $(condition) : $(version) ;
|
||||
}
|
||||
|
||||
if [ os.name ] = NT
|
||||
@@ -356,6 +352,15 @@ if [ os.name ] = NT
|
||||
JAMSHELL = % ;
|
||||
}
|
||||

local rule dump-full-version ( command-string )
{
# -dumpfullversion is only supported for gcc 7+.
# Passing both options works, as the first one that's
# recognized will be used.
return [ MATCH "^([0-9.]+)" :
[ SHELL "$(command-string) -dumpfullversion -dumpversion" ] ] ;
}

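dump-full-version asks the compiler for its version with both -dumpfullversion and -dumpversion (older GCC releases only understand the latter) and keeps the leading run of digits and dots from whatever gets printed. A rough C sketch of that prefix extraction, with an illustrative helper name and sample output:

#include <stdio.h>
#include <string.h>

/* Keep only the leading run of digits and dots from a tool's
 * version output, e.g. "7.3.0\n..." becomes "7.3.0". */
static void version_prefix( const char * output, char * buf, size_t bufsize )
{
    size_t n = strspn( output, "0123456789." );
    if ( n >= bufsize ) n = bufsize - 1;
    memcpy( buf, output, n );
    buf[ n ] = '\0';
}

int main( void )
{
    char buf[ 32 ];
    version_prefix( "7.3.0\n", buf, sizeof buf );  /* sample compiler output */
    printf( "%s\n", buf );
    return 0;
}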
# Uses -print-prog-name to get the name of the tool.
|
||||
# Converts the path to native form if using cygwin.
|
||||
rule .get-prog-name ( command-string : tool : flavor ? )
|
||||
@@ -363,7 +368,7 @@ rule .get-prog-name ( command-string : tool : flavor ? )
|
||||
local prog-name = [ NORMALIZE_PATH [ MATCH "(.*)[\n]+" :
|
||||
[ SHELL "$(command-string) -print-prog-name=$(tool)" ] ] ] ;
|
||||
|
||||
if $(flavor) != vxworks && $(flavor) != mingw && [ os.name ] = NT
|
||||
if $(flavor) = cygwin && [ os.name ] = NT
|
||||
{
|
||||
prog-name = [ cygwin.cygwin-to-windows-path $(prog-name) ] ;
|
||||
}
|
||||
@@ -374,259 +379,127 @@ rule .get-prog-name ( command-string : tool : flavor ? )
|
||||
### Functions that set options on the targets.
|
||||
###
|
||||
|
||||
rule set-fpic-options ( targets * : sources * : properties * )
|
||||
local all-os = [ feature.values <target-os> ] ;
|
||||
|
||||
local rule compile-link-flags ( * )
|
||||
{
|
||||
local link = [ feature.get-values link : $(properties) ] ;
|
||||
if $(link) = shared
|
||||
toolset.flags gcc.compile OPTIONS $(1) : $(2) ;
|
||||
toolset.flags gcc.link OPTIONS $(1) : $(2) ;
|
||||
}
|
||||
|
||||
{
|
||||
# This logic will add -fPIC for all compilations:
|
||||
#
|
||||
# lib a : a.cpp b ;
|
||||
# obj b : b.cpp ;
|
||||
# exe c : c.cpp a d ;
|
||||
# obj d : d.cpp ;
|
||||
#
|
||||
# This all is fine, except that 'd' will be compiled with -fPIC even
|
||||
# though it is not needed, as 'd' is used only in exe. However, it is
|
||||
# hard to detect where a target is going to be used. Alternatively, we
|
||||
# can set -fPIC only when main target type is LIB but than 'b' would be
|
||||
# compiled without -fPIC which would lead to link errors on x86-64. So,
|
||||
# compile everything with -fPIC.
|
||||
#
|
||||
# Yet another alternative would be to create a propagated <sharedable>
|
||||
# feature and set it when building shared libraries, but that would be
|
||||
# hard to implement and would increase the target path length even more.
|
||||
|
||||
# On Windows, fPIC is the default, and specifying -fPIC explicitly leads
|
||||
# to a warning.
|
||||
local non-windows = [ set.difference $(all-os) : cygwin windows ] ;
|
||||
compile-link-flags <link>shared/<target-os>$(non-windows) : -fPIC ;
|
||||
}
|
||||
|
||||
{
|
||||
# Handle address-model
|
||||
compile-link-flags <target-os>aix/<address-model>32 : -maix32 ;
|
||||
compile-link-flags <target-os>aix/<address-model>64 : -maix64 ;
|
||||
|
||||
compile-link-flags <target-os>hpux/<address-model>32 : -milp32 ;
|
||||
compile-link-flags <target-os>hpux/<address-model>64 : -mlp64 ;
|
||||
|
||||
local generic-os = [ set.difference $(all-os) : aix hpux ] ;
|
||||
local arch = power sparc x86 ;
|
||||
compile-link-flags <target-os>$(generic-os)/<architecture>$(arch)/<address-model>32 : -m32 ;
|
||||
compile-link-flags <target-os>$(generic-os)/<architecture>$(arch)/<address-model>64 : -m64 ;
|
||||
}
|
||||
|
||||
{
|
||||
# Handle threading
|
||||
local rule threading-flags ( * )
|
||||
{
|
||||
local target-os = [ feature.get-values target-os : $(properties) ] ;
|
||||
|
||||
# This logic will add -fPIC for all compilations:
|
||||
#
|
||||
# lib a : a.cpp b ;
|
||||
# obj b : b.cpp ;
|
||||
# exe c : c.cpp a d ;
|
||||
# obj d : d.cpp ;
|
||||
#
|
||||
# This all is fine, except that 'd' will be compiled with -fPIC even
|
||||
# though it is not needed, as 'd' is used only in exe. However, it is
|
||||
# hard to detect where a target is going to be used. Alternatively, we
|
||||
# can set -fPIC only when main target type is LIB but than 'b' would be
|
||||
# compiled without -fPIC which would lead to link errors on x86-64. So,
|
||||
# compile everything with -fPIC.
|
||||
#
|
||||
# Yet another alternative would be to create a propagated <sharedable>
|
||||
# feature and set it when building shared libraries, but that would be
|
||||
# hard to implement and would increase the target path length even more.
|
||||
|
||||
# On Windows, fPIC is the default, and specifying -fPIC explicitly leads
|
||||
# to a warning.
|
||||
if ! $(target-os) in cygwin windows
|
||||
compile-link-flags <threading>multi/$(1) : $(2) ;
|
||||
if $(3)
|
||||
{
|
||||
OPTIONS on $(targets) += -fPIC ;
|
||||
toolset.flags gcc.link FINDLIBS-SA <threading>multi/$(1) : $(3) ;
|
||||
}
|
||||
}
|
||||
|
||||
threading-flags <target-os>windows : -mthreads ;
|
||||
threading-flags <target-os>cygwin : -mthreads ;
|
||||
threading-flags <target-os>solaris : -pthreads : rt ;
|
||||
|
||||
local bsd = [ MATCH ^(.*bsd)$ : $(all-os) ] ;
|
||||
threading-flags <target-os>$(bsd) : -pthread ;
|
||||
|
||||
local no-threading = android beos haiku sgi darwin vxworks ;
|
||||
local threading-generic-os = [ set.difference $(all-os) : $(no-threading) $(bsd) windows cygwin solaris ] ;
|
||||
threading-flags <target-os>$(threading-generic-os) : -pthread : rt ;
|
||||
}
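
# Sketch of one of the calls above: on Solaris a <threading>multi build is
# roughly equivalent to
#
#   toolset.flags gcc.compile OPTIONS <threading>multi/<target-os>solaris : -pthreads ;
#   toolset.flags gcc.link OPTIONS <threading>multi/<target-os>solaris : -pthreads ;
#   toolset.flags gcc.link FINDLIBS-SA <threading>multi/<target-os>solaris : rt ;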
|
||||
|
||||
{
local rule cxxstd-flags ( * )
{
toolset.flags gcc.compile.c++ OPTIONS $(1) : $(2) ;
toolset.flags gcc.link OPTIONS $(1) : $(2) ;
}

local cxxstd = [ feature.values <cxxstd> ] ;
local dialects = [ feature.values <cxxstd-dialect> ] ;
.cxxstd-dialects = [ set.difference $(dialects) : gnu iso ] ;
# C++ latest needs to be set up on a per-toolset basis
for local std in [ set.difference $(cxxstd) : latest ]
{
cxxstd-flags <cxxstd>$(std)/<cxxstd-dialect>iso : -std=c++$(std) ;
cxxstd-flags <cxxstd>$(std)/<cxxstd-dialect>gnu : -std=gnu++$(std) ;
# If we see this it's probably a mistake, but
# toolset.flags has no way to set up diagnostics.
cxxstd-flags <cxxstd>$(std)/<cxxstd-dialect>$(.cxxstd-dialects) : -std=c++$(std) ;
}

local rule version-ge ( lhs : rhs )
{
lhs = [ regex.split $(lhs) "[.]" ] ;
rhs = [ regex.split $(rhs) "[.]" ] ;
return [ sequence.compare $(rhs) : $(lhs) : numbers.less ] ;
}
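
# Usage sketch for the helper above (expected results, not a definitive test):
#
#   if [ version-ge 4.9 : 4.8 ] { ECHO "4.9 is at least 4.8" ; }    # holds
#   if [ version-ge 4.10 : 4.9 ] { ECHO "numeric, not lexical" ; }  # holds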
|
||||
# Version specific flags
|
||||
local rule init-cxxstd-flags ( condition * : version )
|
||||
{
|
||||
local std ;
|
||||
if [ version-ge $(version) : 8.0 ] { std = 2a ; }
|
||||
else if [ version-ge $(version) : 5.1 ] { std = 1z ; }
|
||||
else if [ version-ge $(version) : 4.8 ] { std = 1y ; }
|
||||
else if [ version-ge $(version) : 4.7 ] { std = 11 ; }
|
||||
else if [ version-ge $(version) : 3.3 ] { std = 98 ; }
|
||||
if $(std)
|
||||
{
|
||||
cxxstd-flags $(condition)/<cxxstd>latest/<cxxstd-dialect>iso : -std=c++$(std) ;
|
||||
cxxstd-flags $(condition)/<cxxstd>latest/<cxxstd-dialect>gnu : -std=gnu++$(std) ;
|
||||
cxxstd-flags $(condition)/<cxxstd>latest/<cxxstd-dialect>$(.cxxstd-dialects) : -std=c++$(std) ;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rule set-address-model-options ( targets * : sources * : properties * )
|
||||
{
|
||||
local model = [ feature.get-values address-model : $(properties) ] ;
|
||||
if $(model)
|
||||
{
|
||||
local option ;
|
||||
local target-os = [ feature.get-values target-os : $(properties) ] ;
|
||||
if $(target-os) = aix
|
||||
{
|
||||
if $(model) = 32
|
||||
{
|
||||
option = -maix32 ;
|
||||
}
|
||||
else
|
||||
{
|
||||
option = -maix64 ;
|
||||
}
|
||||
}
|
||||
else if $(target-os) = hpux
|
||||
{
|
||||
if $(model) = 32
|
||||
{
|
||||
option = -milp32 ;
|
||||
}
|
||||
else
|
||||
{
|
||||
option = -mlp64 ;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
local arch = [ feature.get-values architecture : $(properties) ] ;
|
||||
if $(arch) = power || $(arch) = sparc || $(arch) = x86
|
||||
{
|
||||
if $(model) = 32
|
||||
{
|
||||
option = -m32 ;
|
||||
}
|
||||
else if $(model) = 64
|
||||
{
|
||||
option = -m64 ;
|
||||
}
|
||||
}
|
||||
# For darwin, the model can be 32_64. darwin.jam will handle that
|
||||
# on its own.
|
||||
}
|
||||
OPTIONS on $(targets) += $(option) ;
|
||||
}
|
||||
}
|
||||
generators.register-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ;
|
||||
generators.register-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ;
|
||||
generators.register-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
|
||||
generators.register-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
|
||||
generators.register-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
|
||||
|
||||
rule set-threading-options ( targets * : sources * : properties * )
|
||||
{
|
||||
local threading = [ feature.get-values threading : $(properties) ] ;
|
||||
if $(threading) = multi
|
||||
{
|
||||
local target-os = [ feature.get-values target-os : $(properties) ] ;
|
||||
local host-os = [ feature.get-values host-os : $(properties) ] ;
|
||||
local toolset = [ feature.get-values toolset : $(properties) ] ;
|
||||
local option ;
|
||||
local libs ;
|
||||
|
||||
if $(toolset) = clang && $(target-os) = windows
|
||||
{
|
||||
option = -pthread ;
|
||||
}
|
||||
|
||||
switch $(target-os)
|
||||
{
|
||||
case android : # No threading options, everything is in already.
|
||||
case windows : option ?= -mthreads ;
|
||||
case cygwin : option ?= -mthreads ;
|
||||
case solaris : option ?= -pthreads ; libs = rt ;
|
||||
case beos : # No threading options.
|
||||
case haiku : # No threading options.
|
||||
case *bsd : option ?= -pthread ; # There is no -lrt on BSD.
|
||||
case sgi : # gcc on IRIX does not support multi-threading.
|
||||
case darwin : # No threading options.
|
||||
case vxworks : # No threading options.
|
||||
case * : option ?= -pthread ; libs = rt ;
|
||||
}
|
||||
if $(option)
|
||||
{
|
||||
OPTIONS on $(targets) += $(option) ;
|
||||
}
|
||||
if $(libs)
|
||||
{
|
||||
FINDLIBS-SA on $(targets) += $(libs) ;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
local rule zero-pad ( numbers * )
{
local result ;
for local n in $(numbers)
{
switch $(n)
{
case ???? : result += $(n) ;
case ??? : result += 0$(n) ;
case ?? : result += 00$(n) ;
case ? : result += 000$(n) ;
}
}
return $(result) ;
}
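
# Sketch of the intended effect: each element is padded to four digits so a
# plain string comparison orders versions correctly, e.g.
#
#   zero-pad 5 11 3   # -> 0005 0011 0003
#   zero-pad 10 2     # -> 0010 0002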
|
||||
|
||||
rule set-cxxstd-options ( targets * : sources * : properties * : action )
|
||||
{
|
||||
local *targets = [ $(action).targets ] ;
|
||||
local *sources = [ $(action).sources ] ;
|
||||
local target-type = [ $(*targets[1]).type ] ;
|
||||
local source-type = [ $(*sources[1]).type ] ;
|
||||
local toolset = [ feature.get-values toolset : $(properties) ] ;
|
||||
local version = [ zero-pad [ on $(targets[1]) return $(VERSION) ] ] ;
|
||||
version = $(version[1]).$(version[2]) ;
|
||||
local cxxstd = [ feature.get-values cxxstd : $(properties) ] ;
|
||||
local cxxstd-dialect = [ feature.get-values cxxstd-dialect : $(properties) ] ;
|
||||
cxxstd-dialect ?= iso ;
|
||||
switch $(cxxstd-dialect)
|
||||
{
|
||||
case gnu : cxxstd-dialect = gnu++ ;
|
||||
case iso : cxxstd-dialect = c++ ;
|
||||
case * :
|
||||
errors.warning Unknown cxxstd-dialect $(cxxstd-dialect:E=?) .. using
|
||||
ISO dialect instead. ;
|
||||
cxxstd-dialect = c++ ;
|
||||
}
|
||||
local option ;
|
||||
if $(cxxstd) = latest
|
||||
{
|
||||
if $(toolset) = gcc
|
||||
{
|
||||
if $(version) >= 0008.0000 { option = 2a ; }
|
||||
else if $(version) >= 0005.0001 { option = 1z ; }
|
||||
else if $(version) >= 0004.0008 { option = 1y ; }
|
||||
else if $(version) >= 0004.0007 { option = 11 ; }
|
||||
else if $(version) >= 0003.0003 { option = 98 ; }
|
||||
}
|
||||
if $(toolset) = clang
|
||||
{
|
||||
if $(version) >= 0003.0005 { option = 1z ; }
|
||||
if $(version) >= 0003.0004 { option = 14 ; }
|
||||
if $(version) >= 0003.0003 { option = 11 ; }
|
||||
option ?= 03 ;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
option = $(cxxstd) ;
|
||||
}
|
||||
if $(source-type) in CPP || $(target-type) in CPP_PCH EXE SHARED_LIB
|
||||
{
|
||||
OPTIONS on $(targets) += -std=$(cxxstd-dialect)$(option) ;
|
||||
}
|
||||
}
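
# Observable effect, sketched for a hypothetical GCC 7.x install: building with
#
#   b2 toolset=gcc cxxstd=latest cxxstd-dialect=gnu
#
# passes -std=gnu++1z, while an explicit request such as
#
#   b2 toolset=gcc cxxstd=14
#
# passes -std=c++14 (the default dialect is iso).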
|
||||
|
||||
###
|
||||
### Compiling generators and actions.
|
||||
###
|
||||
|
||||
class gcc-c-compiling-generator : C-compiling-generator
|
||||
{
|
||||
rule action-class ( )
|
||||
{
|
||||
return gcc-c-compile-action ;
|
||||
}
|
||||
}
|
||||
|
||||
class gcc-c-compile-action : compile-action
|
||||
{
|
||||
import gcc ;
|
||||
|
||||
rule execute ( action-name targets + : sources * : properties * )
|
||||
{
|
||||
gcc.set-threading-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-fpic-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-address-model-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-cxxstd-options $(targets) : $(sources) : $(properties) : $(__name__) ;
|
||||
compile-action.execute $(action-name) $(targets) : $(sources) : $(properties) ;
|
||||
}
|
||||
}
|
||||
|
||||
local rule register-gcc-c-compiler ( id : source-types + : target-types + : requirements *
|
||||
: optional-properties * )
|
||||
{
|
||||
generators.register [ new gcc-c-compiling-generator $(id) : $(source-types) :
|
||||
$(target-types) : $(requirements) : $(optional-properties) ] ;
|
||||
}
|
||||
|
||||
register-gcc-c-compiler gcc.compile.c++.preprocess : CPP : PREPROCESSED_CPP : <toolset>gcc ;
|
||||
register-gcc-c-compiler gcc.compile.c.preprocess : C : PREPROCESSED_C : <toolset>gcc ;
|
||||
register-gcc-c-compiler gcc.compile.c++ : CPP : OBJ : <toolset>gcc ;
|
||||
register-gcc-c-compiler gcc.compile.c : C : OBJ : <toolset>gcc ;
|
||||
register-gcc-c-compiler gcc.compile.asm : ASM : OBJ : <toolset>gcc ;
|
||||
|
||||
class gcc-fortran-compiling-generator : fortran-compiling-generator
|
||||
{
|
||||
rule action-class ( )
|
||||
{
|
||||
return gcc-fortran-compile-action ;
|
||||
}
|
||||
}
|
||||
|
||||
class gcc-fortran-compile-action : compile-action
|
||||
{
|
||||
import gcc ;
|
||||
|
||||
rule execute ( action-name targets + : sources * : properties * )
|
||||
{
|
||||
gcc.set-threading-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-fpic-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-address-model-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-cxxstd-options $(targets) : $(sources) : $(properties) : $(__name__) ;
|
||||
compile-action.execute $(action-name) $(targets) : $(sources) : $(properties) ;
|
||||
}
|
||||
}
|
||||
|
||||
generators.register [ new gcc-fortran-compiling-generator
|
||||
generators.register [ new fortran-compiling-generator
|
||||
gcc.compile.fortran : FORTRAN FORTRAN90 : OBJ : <toolset>gcc ] ;
|
||||
|
||||
rule compile.c++.preprocess ( targets * : sources * : properties * )
|
||||
@@ -763,8 +636,8 @@ class gcc-pch-generator : pch-generator
|
||||
# Return result of base class and pch-file property as
|
||||
# usage-requirements.
|
||||
return
|
||||
[ property-set.create <pch-file>$(pch-file) <cflags>-Winvalid-pch ]
|
||||
$(pch-file)
|
||||
[ $(pch-file[1]).add-raw <pch-file>$(pch-file[2-]) <cflags>-Winvalid-pch ]
|
||||
$(pch-file[2-])
|
||||
;
|
||||
}
|
||||
|
||||
@@ -777,25 +650,6 @@ class gcc-pch-generator : pch-generator
|
||||
return [ generator.generated-targets $(sources)
|
||||
: $(property-set) : $(project) $(name) ] ;
|
||||
}
|
||||
|
||||
rule action-class ( )
|
||||
{
|
||||
return gcc-pch-compile-action ;
|
||||
}
|
||||
}
|
||||
|
||||
class gcc-pch-compile-action : compile-action
|
||||
{
|
||||
import gcc ;
|
||||
|
||||
rule execute ( action-name targets + : sources * : properties * )
|
||||
{
|
||||
gcc.set-threading-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-fpic-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-address-model-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-cxxstd-options $(targets) : $(sources) : $(properties) : $(__name__) ;
|
||||
compile-action.execute $(action-name) $(targets) : $(sources) : $(properties) ;
|
||||
}
|
||||
}
|
||||
|
||||
# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
|
||||
@@ -917,26 +771,6 @@ class gcc-linking-generator : unix-linking-generator
|
||||
$(property-set) : $(sources) ] ;
|
||||
}
|
||||
}
|
||||
|
||||
rule action-class ( )
|
||||
{
|
||||
return gcc-link-action ;
|
||||
}
|
||||
}
|
||||
|
||||
class gcc-link-action : action
|
||||
{
|
||||
import gcc ;
|
||||
|
||||
rule execute ( action-name targets + : sources * : properties * )
|
||||
{
|
||||
gcc.set-threading-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-fpic-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-address-model-options $(targets) : $(sources) : $(properties) ;
|
||||
gcc.set-cxxstd-options $(targets) : $(sources) : $(properties) : $(__name__) ;
|
||||
gcc.set-link-options $(action-name) $(targets) : $(sources) : $(properties) ;
|
||||
action.execute $(action-name) $(targets) : $(sources) : $(properties) ;
|
||||
}
|
||||
}
|
||||
|
||||
# The set of permissible input types is different on mingw. So, define two sets
|
||||
@@ -1005,230 +839,163 @@ toolset.flags gcc.link LIBRARIES <library-file> ;
|
||||
toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>windows : "-Wl,--out-implib," ;
|
||||
toolset.flags gcc.link.dll .IMPLIB-COMMAND <target-os>cygwin : "-Wl,--out-implib," ;
|
||||
|
||||
# Now, the vendor specific flags.
|
||||
# The parameter linker can be either aix, darwin, gnu, hpux, osf or sun.
|
||||
rule init-link-flags ( toolset subtool condition )
|
||||
# target specific link flags
|
||||
{
|
||||
## Need to define the linker-type feature once for each toolset module.
|
||||
if ! [ feature.valid <toolset-$(toolset):linker-type> ]
|
||||
{
|
||||
feature.subfeature toolset $(toolset) : linker-type :
|
||||
gnu aix darwin hpux osf sun : propagated link-incompatible ;
|
||||
}
|
||||
## The specification to add the linker-type is per toolset "instance".
|
||||
toolset.add-requirements
|
||||
$(condition),<target-os>aix:<toolset-$(toolset):linker-type>aix
|
||||
$(condition),<target-os>darwin:<toolset-$(toolset):linker-type>darwin
|
||||
$(condition),<target-os>hpux:<toolset-$(toolset):linker-type>hpux
|
||||
$(condition),<target-os>osf:<toolset-$(toolset):linker-type>osf
|
||||
$(condition),<target-os>solaris:<toolset-$(toolset):linker-type>sun
|
||||
;
|
||||
}
|
||||
# aix
|
||||
|
||||
rule set-link-options ( action-name targets + : sources * : properties * )
|
||||
{
|
||||
local toolset = [ feature.get-values <toolset> : $(properties) ] ;
|
||||
local linker-type = [ feature.get-values <toolset-$(toolset):linker-type> : $(properties) ] ;
|
||||
local target-os = [ feature.get-values <target-os> : $(properties) ] ;
|
||||
# On AIX we *have* to use the native linker.
|
||||
#
|
||||
# Using -brtl, the AIX linker will look for libraries with both the .a
|
||||
# and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the
|
||||
# AIX linker looks only for libfoo.a. Note that libfoo.a is an archived
|
||||
# file that may contain shared objects and is different from static libs
|
||||
# as on Linux.
|
||||
#
|
||||
# The -bnoipath strips the prepending (relative) path of libraries from
|
||||
# the loader section in the target library or executable. Hence, during
|
||||
# load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
|
||||
# -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
|
||||
# this option, the prepending (relative) path + library name is
|
||||
# hard-coded in the loader section, causing *only* this path to be
|
||||
# searched during load-time. Note that the AIX linker does not have an
|
||||
# -soname equivalent, this is as close as it gets.
|
||||
#
|
||||
# The -bbigtoc option instructs the linker to create a TOC bigger than 64k.
# This is necessary for some submodules such as math, but it does make running
# the tests a tad slower.
#
# The above options are definitely for AIX 5.x, and most likely also for
# AIX 4.x and AIX 6.x. For details about the AIX linker see:
# http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
|
||||
#
|
||||
toolset.flags gcc.link OPTIONS <target-os>aix : -Wl,-brtl -Wl,-bnoipath -Wl,-bbigtoc ;
|
||||
|
||||
switch $(linker-type:G=)
|
||||
{
|
||||
case aix :
|
||||
# See note [1]
|
||||
toolset.flags gcc.link OPTIONS <target-os>aix/<runtime-link>static : -static ;
|
||||
|
||||
# On AIX we *have* to use the native linker.
|
||||
#
|
||||
# Using -brtl, the AIX linker will look for libraries with both the .a
|
||||
# and .so extensions, such as libfoo.a and libfoo.so. Without -brtl, the
|
||||
# AIX linker looks only for libfoo.a. Note that libfoo.a is an archived
|
||||
# file that may contain shared objects and is different from static libs
|
||||
# as on Linux.
|
||||
#
|
||||
# The -bnoipath strips the prepending (relative) path of libraries from
|
||||
# the loader section in the target library or executable. Hence, during
|
||||
# load-time LIBPATH (identical to LD_LIBRARY_PATH) or a hard-coded
|
||||
# -blibpath (*similar* to -lrpath/-lrpath-link) is searched. Without
|
||||
# this option, the prepending (relative) path + library name is
|
||||
# hard-coded in the loader section, causing *only* this path to be
|
||||
# searched during load-time. Note that the AIX linker does not have an
|
||||
# -soname equivalent, this is as close as it gets.
|
||||
#
|
||||
# The -bbigtoc option instructs the linker to create a TOC bigger than 64k.
# This is necessary for some submodules such as math, but it does make running
# the tests a tad slower.
#
# The above options are definitely for AIX 5.x, and most likely also for
# AIX 4.x and AIX 6.x. For details about the AIX linker see:
# http://download.boulder.ibm.com/ibmdl/pub/software/dw/aix/es-aix_ll.pdf
|
||||
#
|
||||
# darwin
|
||||
|
||||
OPTIONS on $(targets) += -Wl,-brtl -Wl,-bnoipath -Wl,-bbigtoc ;
|
||||
# On Darwin, the -s option to ld does not work unless we pass -static,
|
||||
# and passing -static unconditionally is a bad idea. So, do not pass -s
|
||||
# at all and darwin.jam will use a separate 'strip' invocation.
|
||||
toolset.flags gcc.link RPATH <target-os>darwin : <dll-path> ;
|
||||
# This does not support -R.
|
||||
toolset.flags gcc.link RPATH_OPTION <target-os>darwin : -rpath ;
|
||||
# -rpath-link is not supported at all.
|
||||
|
||||
# See note [1]
|
||||
if <runtime-link>static in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -static ;
|
||||
}
|
||||
|
||||
case darwin :
|
||||
|
||||
# On Darwin, the -s option to ld does not work unless we pass -static,
|
||||
# and passing -static unconditionally is a bad idea. So, do not pass -s
|
||||
# at all and darwin.jam will use a separate 'strip' invocation.
|
||||
RPATH on $(targets) +=
|
||||
[ feature.get-values <dll-path> : $(properties) ] ;
|
||||
# This does not support -R.
|
||||
RPATH_OPTION on $(targets) += -rpath ;
|
||||
# -rpath-link is not supported at all.
|
||||
|
||||
# See note [1]
|
||||
if <runtime-link>static in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -static ;
|
||||
}
|
||||
# See note [1]
|
||||
toolset.flags gcc.link OPTIONS <target-os>darwin/<runtime-link>static : -static ;
|
||||
|
||||
case vxworks :
|
||||
# On VxWorks we want to reflect whatever special flags have been set in the
# environment for the CPU we are targeting in the cross build.
|
||||
toolset.flags $(toolset).link OPTIONS $(condition)/<strip>on : -Wl,--strip-all : unchecked ;
|
||||
toolset.flags $(toolset).link OPTIONS $(condition)/<link>static : [ os.environ LDFLAGS_STATIC ] : unchecked ;
|
||||
toolset.flags $(toolset).link.dll OPTIONS $(condition) : [ os.environ LDFLAGS_SO ] : unchecked ;
|
||||
toolset.flags $(toolset).link OPTIONS $(condition)/<link>shared : [ os.environ LDFLAGS_DYNAMIC ] : unchecked ;
|
||||
# vxworks
|
||||
# On VxWorks we want to reflect whatever special flags have been set in the
# environment for the CPU we are targeting in the cross build.
|
||||
toolset.flags gcc.link OPTIONS <target-os>vxworks/<strip>on : -Wl,--strip-all ;
|
||||
toolset.flags gcc.link OPTIONS <target-os>vxworks/<link>static : [ os.environ LDFLAGS_STATIC ] ;
|
||||
toolset.flags gcc.link.dll OPTIONS <target-os>vxworks : [ os.environ LDFLAGS_SO ] ;
|
||||
toolset.flags gcc.link OPTIONS <target-os>vxworks/<link>shared : [ os.environ LDFLAGS_DYNAMIC ] ;
|
||||
|
||||
case gnu :
|
||||
# default
|
||||
|
||||
# Strip the binary when no debugging is needed. We use --strip-all flag
|
||||
# as opposed to -s since icc (intel's compiler) is generally
|
||||
# option-compatible with and inherits from the gcc toolset, but does not
|
||||
# support -s.
|
||||
if <strip>on in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -Wl,--strip-all ;
|
||||
}
|
||||
RPATH on $(targets) +=
|
||||
[ feature.get-values <dll-path> : $(properties) ] ;
|
||||
RPATH_OPTION on $(targets) += -rpath ;
|
||||
RPATH_LINK on $(targets) +=
|
||||
[ feature.get-values <xdll-path> : $(properties) ] ;
|
||||
START-GROUP on $(targets) += -Wl,--start-group ;
|
||||
END-GROUP on $(targets) += -Wl,--end-group ;
|
||||
local generic-os = [ set.difference $(all-os) : aix darwin vxworks solaris osf hpux ] ;
|
||||
# Strip the binary when no debugging is needed. We use --strip-all flag
|
||||
# as opposed to -s since icc (intel's compiler) is generally
|
||||
# option-compatible with and inherits from the gcc toolset, but does not
|
||||
# support -s.
|
||||
toolset.flags gcc.link OPTIONS <target-os>$(generic-os)/<strip>on :
|
||||
-Wl,--strip-all ;
|
||||
toolset.flags gcc.link RPATH <target-os>$(generic-os) : <dll-path> ;
|
||||
toolset.flags gcc.link RPATH_OPTION <target-os>$(generic-os) : -rpath ;
|
||||
toolset.flags gcc.link RPATH_LINK <target-os>$(generic-os) : <xdll-path> ;
|
||||
toolset.flags gcc.link START-GROUP <target-os>$(generic-os) :
|
||||
-Wl,--start-group ;
|
||||
toolset.flags gcc.link END-GROUP <target-os>$(generic-os) : -Wl,--end-group ;
|
||||
|
||||
# gnu ld has the ability to change the search behaviour for libraries
|
||||
# referenced by the -l switch. These modifiers are -Bstatic and
|
||||
# -Bdynamic and change search for -l switches that follow them. The
|
||||
# following list shows the tried variants. Search stops at the first
|
||||
# variant that has a match.
|
||||
#
|
||||
# *nix: -Bstatic -lxxx
|
||||
# libxxx.a
|
||||
#
|
||||
# *nix: -Bdynamic -lxxx
|
||||
# libxxx.so
|
||||
# libxxx.a
|
||||
#
|
||||
# windows (mingw, cygwin) -Bstatic -lxxx
|
||||
# libxxx.a
|
||||
# xxx.lib
|
||||
#
|
||||
# windows (mingw, cygwin) -Bdynamic -lxxx
|
||||
# libxxx.dll.a
|
||||
# xxx.dll.a
|
||||
# libxxx.a
|
||||
# xxx.lib
|
||||
# cygxxx.dll (*)
|
||||
# libxxx.dll
|
||||
# xxx.dll
|
||||
# libxxx.a
|
||||
#
|
||||
# (*) This is for cygwin
|
||||
# Please note that -Bstatic and -Bdynamic are not a guarantee that a
|
||||
# static or dynamic lib indeed gets linked in. The switches only change
|
||||
# search patterns!
|
||||
# gnu ld has the ability to change the search behaviour for libraries
|
||||
# referenced by the -l switch. These modifiers are -Bstatic and
|
||||
# -Bdynamic and change search for -l switches that follow them. The
|
||||
# following list shows the tried variants. Search stops at the first
|
||||
# variant that has a match.
|
||||
#
|
||||
# *nix: -Bstatic -lxxx
|
||||
# libxxx.a
|
||||
#
|
||||
# *nix: -Bdynamic -lxxx
|
||||
# libxxx.so
|
||||
# libxxx.a
|
||||
#
|
||||
# windows (mingw, cygwin) -Bstatic -lxxx
|
||||
# libxxx.a
|
||||
# xxx.lib
|
||||
#
|
||||
# windows (mingw, cygwin) -Bdynamic -lxxx
|
||||
# libxxx.dll.a
|
||||
# xxx.dll.a
|
||||
# libxxx.a
|
||||
# xxx.lib
|
||||
# cygxxx.dll (*)
|
||||
# libxxx.dll
|
||||
# xxx.dll
|
||||
# libxxx.a
|
||||
#
|
||||
# (*) This is for cygwin
|
||||
# Please note that -Bstatic and -Bdynamic are not a guarantee that a
|
||||
# static or dynamic lib indeed gets linked in. The switches only change
|
||||
# search patterns!
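
# Rough sketch of the command-line effect (library names are illustrative):
# under <runtime-link>shared the searched libraries end up grouped as
#
#   ... -Wl,-Bstatic -l<static-searched-libs> -Wl,-Bdynamic -l<shared-searched-libs> ...
#
# so only the -l lookups after -Bstatic are forced to the static variants.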
|
||||
|
||||
# On *nix mixing shared libs with static runtime is not a good idea.
|
||||
if <runtime-link>shared in $(properties)
|
||||
{
|
||||
FINDLIBS-ST-PFX on $(targets) += -Wl,-Bstatic ;
|
||||
FINDLIBS-SA-PFX on $(targets) += -Wl,-Bdynamic ;
|
||||
}
|
||||
# On *nix mixing shared libs with static runtime is not a good idea.
|
||||
toolset.flags gcc.link FINDLIBS-ST-PFX <target-os>$(generic-os)/<runtime-link>shared : -Wl,-Bstatic ;
|
||||
toolset.flags gcc.link FINDLIBS-SA-PFX <target-os>$(generic-os)/<runtime-link>shared : -Wl,-Bdynamic ;
|
||||
|
||||
# On Windows we do allow mixing static and dynamic libs with a static
# runtime, even though on *nix that is not a good idea.
|
||||
if <runtime-link>static in $(properties) && <target-os>windows in $(properties)
|
||||
{
|
||||
FINDLIBS-ST-PFX on $(targets) += -Wl,-Bstatic ;
|
||||
FINDLIBS-SA-PFX on $(targets) += -Wl,-Bdynamic ;
|
||||
OPTIONS on $(targets) += -Wl,-Bstatic ;
|
||||
}
|
||||
# On Windows we do allow mixing static and dynamic libs with a static
# runtime, even though on *nix that is not a good idea.
|
||||
toolset.flags gcc.link FINDLIBS-ST-PFX <target-os>windows/<runtime-link>static : -Wl,-Bstatic ;
|
||||
toolset.flags gcc.link FINDLIBS-SA-PFX <target-os>windows/<runtime-link>static : -Wl,-Bdynamic ;
|
||||
toolset.flags gcc.link OPTIONS <target-os>windows/<runtime-link>static : -Wl,-Bstatic ;
|
||||
|
||||
HAVE_SONAME on $(targets) += "" ;
|
||||
SONAME_OPTION on $(targets) += -h ;
|
||||
toolset.flags gcc.link HAVE_SONAME <target-os>$(generic-os) : "" ;
|
||||
toolset.flags gcc.link SONAME_OPTION <target-os>$(generic-os) : -h ;
|
||||
|
||||
# See note [1]
|
||||
if <runtime-link>static in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -static ;
|
||||
}
|
||||
# See note [1]
|
||||
toolset.flags gcc.link OPTIONS <target-os>$(generic-os)/<runtime-link>static : -static ;
|
||||
|
||||
case hpux :
|
||||
# hpux
|
||||
|
||||
if <strip>on in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -Wl,-s ;
|
||||
}
|
||||
if <link>shared in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -fPIC ;
|
||||
}
|
||||
toolset.flags gcc.link OPTIONS <target-os>hpux/<strip>on : -Wl,-s ;
|
||||
|
||||
HAVE_SONAME on $(targets) += "" ;
|
||||
SONAME_OPTION on $(targets) += +h ;
|
||||
toolset.flags gcc.link HAVE_SONAME <target-os>hpux : "" ;
|
||||
toolset.flags gcc.link SONAME_OPTION <target-os>hpux : +h ;
|
||||
|
||||
case osf :
|
||||
# osf
|
||||
|
||||
# No --strip-all, just -s.
|
||||
OPTIONS
|
||||
<toolset-$(toolset):linker-type>osf/$(condition)/<strip>on
|
||||
: -Wl,-s
|
||||
: unchecked ;
|
||||
RPATH on $(targets) += [ feature.get-values <dll-path> ] ;
|
||||
# This does not support -R.
|
||||
RPATH_OPTION on $(targets) += -rpath ;
|
||||
# -rpath-link is not supported at all.
|
||||
# No --strip-all, just -s.
|
||||
toolset.flags gcc.link OPTIONS <target-os>osf/<strip>on : -Wl,-s ;
|
||||
toolset.flags gcc.link RPATH <target-os>osf : <dll-path> ;
|
||||
# This does not support -R.
|
||||
toolset.flags gcc.link RPATH_OPTION <target-os>osf : -rpath ;
|
||||
# -rpath-link is not supported at all.
|
||||
|
||||
# See note [1]
|
||||
if <runtime-link>static in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -static ;
|
||||
}
|
||||
# See note [1]
|
||||
toolset.flags gcc.link OPTIONS <target-os>osf/<runtime-link>static : -static ;
|
||||
|
||||
case sun :
|
||||
# sun
|
||||
|
||||
if <strip>on in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -Wl,-s ;
|
||||
}
|
||||
RPATH on $(targets) += [ feature.get-values <dll-path> ] ;
|
||||
# Solaris linker does not have a separate -rpath-link, but allows using
|
||||
# -L for the same purpose.
|
||||
LINKPATH on $(targets) += [ feature.get-values <xdll-path> ] ;
|
||||
toolset.flags gcc.link OPTIONS <target-os>solaris/<strip>on : -Wl,-s ;
|
||||
|
||||
# This permits shared libraries with non-PIC code on Solaris.
|
||||
# VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the
|
||||
# following is not needed. Whether -fPIC should be hardcoded, is a
|
||||
# separate question.
|
||||
# AH, 2004/10/16: it is still necessary because some tests link against
|
||||
# static libraries that were compiled without PIC.
|
||||
if <link>shared in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -mimpure-text ;
|
||||
}
|
||||
toolset.flags gcc.link RPATH <target-os>solaris : <dll-path> ;
|
||||
# Solaris linker does not have a separate -rpath-link, but allows using
|
||||
# -L for the same purpose.
|
||||
toolset.flags gcc.link LINKPATH <target-os>solaris : <xdll-path> ;
|
||||
|
||||
# See note [1]
|
||||
if <runtime-link>static in $(properties)
|
||||
{
|
||||
OPTIONS on $(targets) += -static ;
|
||||
}
|
||||
}
|
||||
# This permits shared libraries with non-PIC code on Solaris.
|
||||
# VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the
|
||||
# following is not needed. Whether -fPIC should be hardcoded, is a
|
||||
# separate question.
|
||||
# AH, 2004/10/16: it is still necessary because some tests link against
|
||||
# static libraries that were compiled without PIC.
|
||||
toolset.flags gcc.link OPTIONS <target-os>solaris : -mimpure-text ;
|
||||
|
||||
# See note [1]
|
||||
toolset.flags gcc.link OPTIONS <target-os>solaris/<runtime-link>static : -static ;
|
||||
|
||||
# [1]
|
||||
# For <runtime-link>static we made sure there are no dynamic libraries in the
|
||||
|
||||
@@ -11,6 +11,7 @@ import generators ;
|
||||
#
|
||||
class archive-generator : generator
|
||||
{
|
||||
import generators ;
|
||||
import property-set ;
|
||||
|
||||
rule __init__ ( id composing ? : source-types + : target-types +
|
||||
@@ -25,6 +26,8 @@ class archive-generator : generator
|
||||
{
|
||||
sources += [ $(property-set).get <library> ] ;
|
||||
|
||||
property-set = [ $(property-set).add-raw <relevant>link ] ;
|
||||
|
||||
local result = [ generator.run $(project) $(name) : $(property-set)
|
||||
: $(sources) ] ;
|
||||
|
||||
@@ -44,7 +47,7 @@ class archive-generator : generator
|
||||
# will link to the library, but it should not cause any harm. So, return
|
||||
# all LIB sources together with created targets, so that dependants link
|
||||
# to them.
|
||||
local usage-requirements ;
|
||||
local usage-requirements = <relevant>link ;
|
||||
if [ $(property-set).get <link> ] = static
|
||||
{
|
||||
for local t in $(sources)
|
||||
@@ -56,9 +59,7 @@ class archive-generator : generator
|
||||
}
|
||||
}
|
||||
|
||||
usage-requirements = [ property-set.create $(usage-requirements) ] ;
|
||||
|
||||
return $(usage-requirements) $(result) ;
|
||||
return [ generators.add-usage-requirements $(result) : $(usage-requirements) ] ;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -52,10 +52,11 @@ class lib-generator : generator
|
||||
{
|
||||
actual-type = STATIC_LIB ;
|
||||
}
|
||||
property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
|
||||
property-set = [ $(property-set).add-raw <main-target-type>LIB <relevant>link ] ;
|
||||
# Construct the target.
|
||||
return [ generators.construct $(project) $(name) : $(actual-type)
|
||||
local result = [ generators.construct $(project) $(name) : $(actual-type)
|
||||
: $(property-set) : $(sources) ] ;
|
||||
return [ $(result[1]).add-raw <relevant>link ] $(result[2-]) ;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -35,7 +35,7 @@ class linking-generator : generator
|
||||
sources += [ $(property-set).get <library> ] ;
|
||||
|
||||
# Add <library-path> properties for all searched libraries.
|
||||
local extra ;
|
||||
local extra = <relevant>link ;
|
||||
for local s in $(sources)
|
||||
{
|
||||
if [ $(s).type ] = SEARCHED_LIB
|
||||
@@ -55,15 +55,8 @@ class linking-generator : generator
|
||||
{
|
||||
if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
|
||||
{
|
||||
# Unfortunately, we do not have a good way to find the path to a
|
||||
# file, so use this nasty approach.
|
||||
#
|
||||
# TODO: This needs to be done better. One thing that is really
|
||||
# broken with this is that it does not work correctly with
|
||||
# projects having multiple source locations.
|
||||
local p = [ $(s).project ] ;
|
||||
local location = [ path.root [ $(s).name ]
|
||||
[ $(p).get source-location ] ] ;
|
||||
[ $(s).path ] ] ;
|
||||
extra-xdll-paths += [ path.parent $(location) ] ;
|
||||
}
|
||||
}
|
||||
@@ -90,11 +83,12 @@ class linking-generator : generator
|
||||
local ur ;
|
||||
if $(result)
|
||||
{
|
||||
ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
|
||||
ur = [ $(ur).add
|
||||
[ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
|
||||
ur = [ extra-usage-requirements $(result[2-]) : $(property-set) ] ;
|
||||
ur = [ $(ur).add-raw
|
||||
<relevant>link <xdll-path>$(extra-xdll-paths) ] ;
|
||||
ur = [ $(ur).add $(result[1]) ] ;
|
||||
}
|
||||
return $(ur) $(result) ;
|
||||
return $(ur) $(result[2-]) ;
|
||||
}
|
||||
|
||||
rule extra-usage-requirements ( created-targets * : property-set )
|
||||
|
||||
@@ -38,7 +38,7 @@ class searched-lib-generator : generator
|
||||
|
||||
local search = [ feature.get-values <search> : $(properties) ] ;
|
||||
|
||||
local a = [ new null-action $(property-set) ] ;
|
||||
local a = [ new null-action [ $(property-set).add-raw <relevant>link ] ] ;
|
||||
local lib-name = [ feature.get-values <name> : $(properties) ] ;
|
||||
lib-name ?= $(name) ;
|
||||
local t = [ new searched-lib-target $(lib-name) : $(project)
|
||||
@@ -47,7 +47,7 @@ class searched-lib-generator : generator
|
||||
# lib png : z : <name>png ;
|
||||
# the 'z' target should be returned, so that apps linking to 'png'
|
||||
# will link to 'z', too.
|
||||
return [ property-set.create <xdll-path>$(search) ]
|
||||
return [ property-set.create <xdll-path>$(search) <relevant>link ]
|
||||
[ virtual-target.register $(t) ] $(sources) ;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,8 +55,6 @@ rule init ( version ? : command * : options * )
|
||||
|
||||
common.handle-options intel-darwin : $(condition) : $(command) : $(options) ;
|
||||
|
||||
gcc.init-link-flags intel-darwin darwin $(condition) ;
|
||||
|
||||
# handle <library-path>
|
||||
# local library-path = [ feature.get-values <library-path> : $(options) ] ;
|
||||
# flags intel-darwin.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
|
||||
@@ -118,6 +116,14 @@ rule init ( version ? : command * : options * )
|
||||
if $(major) = "9" || ( $(major) = "10" && ( $(minor) = "0" || $(minor) = "1" ) ) {
|
||||
flags intel-darwin.compile DEFINES $(condition) : __WINT_TYPE__=int : unchecked ;
|
||||
}
|
||||
|
||||
# - Ranlib.
|
||||
local ranlib = [ feature.get-values <ranlib> : $(options) ] ;
|
||||
toolset.flags intel-darwin.archive .RANLIB $(condition) : $(ranlib[1]) ;
|
||||
|
||||
# - Archive builder.
|
||||
local archiver = [ feature.get-values <archiver> : $(options) ] ;
|
||||
toolset.flags intel-darwin.archive .AR $(condition) : $(archiver[1]) ;
|
||||
}
|
||||
|
||||
SPACE = " " ;
|
||||
@@ -168,6 +174,7 @@ flags intel-darwin ARFLAGS <archiveflags> ;
|
||||
# logic in intel-linux, but that's hardly worth the trouble
|
||||
# as on Linux, 'ar' is always available.
|
||||
.AR = ar ;
|
||||
.RANLIB = ranlib ;
|
||||
|
||||
rule archive ( targets * : sources * : properties * )
|
||||
{
|
||||
@@ -201,7 +208,7 @@ rule archive ( targets * : sources * : properties * )
|
||||
actions piecemeal archive
|
||||
{
|
||||
"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"
|
||||
"ranlib" -cs "$(<)"
|
||||
"$(.RANLIB)" -cs "$(<)"
|
||||
}
|
||||
|
||||
flags intel-darwin.link USER_OPTIONS <linkflags> ;
|
||||
|
||||
@@ -96,8 +96,6 @@ rule init ( version ? : command * : options * )
|
||||
: $(command) : $(default_path) ] ;
|
||||
|
||||
common.handle-options intel-linux : $(condition) : $(command) : $(options) ;
|
||||
|
||||
gcc.init-link-flags intel-linux gnu $(condition) ;
|
||||
|
||||
local root = [ feature.get-values <root> : $(options) ] ;
|
||||
local bin ;
|
||||
|
||||
@@ -54,8 +54,6 @@ rule init ( version ? : command * : options * )
|
||||
|
||||
common.handle-options intel-vxworks : $(condition) : $(command) : $(options) ;
|
||||
|
||||
gcc.init-link-flags intel-vxworks vxworks $(condition) ;
|
||||
|
||||
# handle <library-path>
|
||||
# local library-path = [ feature.get-values <library-path> : $(options) ] ;
|
||||
# flags intel-vxworks.link USER_OPTIONS $(condition) : [ feature.get-values <dll-path> : $(options) ] ;
|
||||
|
||||
@@ -5,8 +5,7 @@
|
||||
import type ;
|
||||
import generators ;
|
||||
import feature ;
|
||||
import property ;
|
||||
|
||||
import toolset : flags ;
|
||||
|
||||
feature.feature flex.prefix : : free ;
|
||||
type.register LEX : l ;
|
||||
@@ -18,14 +17,7 @@ rule init ( )
|
||||
{
|
||||
}
|
||||
|
||||
rule lex ( target : source : properties * )
|
||||
{
|
||||
local r = [ property.select flex.prefix : $(properties) ] ;
|
||||
if $(r)
|
||||
{
|
||||
PREFIX on $(<) = $(r:G=) ;
|
||||
}
|
||||
}
|
||||
flags lex.lex PREFIX <flex.prefix> ;
|
||||
|
||||
actions lex
|
||||
{
|
||||
|
||||
@@ -14,17 +14,20 @@
|
||||
import project ;
|
||||
import ac ;
|
||||
import errors ;
|
||||
import feature ;
|
||||
import "class" : new ;
|
||||
import targets ;
|
||||
import path ;
|
||||
import modules ;
|
||||
import errors ;
|
||||
import indirect ;
|
||||
import property ;
|
||||
import property-set ;
|
||||
|
||||
header = jpeglib.h ;
|
||||
|
||||
# jpeglib.h requires stdio.h to be included first.
|
||||
header-test = "#include <stdio.h>\n#include <jpeglib.h>\n" ;
|
||||
|
||||
names = jpeg ;
|
||||
|
||||
sources = jaricom.c jcapimin.c jcapistd.c jcarith.c jccoefct.c jccolor.c
|
||||
@@ -99,7 +102,10 @@ rule init (
|
||||
|
||||
: is-default ?
|
||||
# Default configurations are only used when libjpeg
|
||||
# has not yet been configured.
|
||||
# has not yet been configured. This option is
|
||||
# deprecated. A configuration will be treated
|
||||
# as a default when none of <include>, <search>,
|
||||
# <name>, and <source> are present.
|
||||
)
|
||||
{
|
||||
local caller = [ project.current ] ;
|
||||
@@ -113,24 +119,22 @@ rule init (
|
||||
project libjpeg ;
|
||||
}
|
||||
|
||||
local library-path = [ property.select <search> : $(options) ] ;
|
||||
library-path = $(library-path:G=) ;
|
||||
local include-path = [ property.select <include> : $(options) ] ;
|
||||
include-path = $(include-path:G=) ;
|
||||
local source-path = [ property.select <source> : $(options) ] ;
|
||||
source-path = $(source-path:G=) ;
|
||||
local library-name = [ property.select <name> : $(options) ] ;
|
||||
library-name = $(library-name:G=) ;
|
||||
local tag = [ property.select <tag> : $(options) ] ;
|
||||
tag = $(tag:G=) ;
|
||||
local build-name = [ property.select <build-name> : $(options) ] ;
|
||||
build-name = $(build-name:G=) ;
|
||||
local library-path = [ feature.get-values <search> : $(options) ] ;
|
||||
local include-path = [ feature.get-values <include> : $(options) ] ;
|
||||
local source-path = [ feature.get-values <source> : $(options) ] ;
|
||||
local library-name = [ feature.get-values <name> : $(options) ] ;
|
||||
local tag = [ feature.get-values <tag> : $(options) ] ;
|
||||
local build-name = [ feature.get-values <build-name> : $(options) ] ;
|
||||
|
||||
condition = [ property-set.create $(requirements) ] ;
|
||||
condition = [ property-set.create [ $(condition).base ] ] ;
|
||||
|
||||
local no-build-from-source ;
|
||||
# Ignore environmental ZLIB_SOURCE if this initialization
|
||||
if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
|
||||
{
|
||||
is-default = true ;
|
||||
}
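
# Hypothetical configuration examples for the default detection above: the
# first call is treated as a default (it names none of <include>, <search>,
# <name> or <source>), the second is not.
#
#   using libjpeg ;
#   using libjpeg : : <search>/opt/jpeg/lib <include>/opt/jpeg/include <name>jpeg ;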
|
||||
|
||||
# Ignore environmental LIBJPEG_SOURCE if this initialization
|
||||
# requested to search for a specific pre-built library.
|
||||
if $(library-path) || $(include-path) || $(library-name)
|
||||
{
|
||||
@@ -140,13 +144,11 @@ rule init (
|
||||
[ property.select <search> <include> <name> : $(options) ] "and"
|
||||
[ property.select <source> <tag> <build-name> : $(options) ] ;
|
||||
}
|
||||
else
|
||||
{
|
||||
no-build-from-source = true ;
|
||||
}
|
||||
}
|
||||
|
||||
source-path ?= [ modules.peek : ZLIB_SOURCE ] ;
|
||||
else
|
||||
{
|
||||
source-path ?= [ modules.peek : LIBJPEG_SOURCE ] ;
|
||||
}
|
||||
|
||||
if $(.configured.$(condition))
|
||||
{
|
||||
@@ -163,12 +165,12 @@ rule init (
|
||||
}
|
||||
return ;
|
||||
}
|
||||
else if $(source-path) && ! $(no-build-from-source)
|
||||
else if $(source-path)
|
||||
{
|
||||
build-name ?= jpeg ;
|
||||
library-id = [ CALC $(library-id) + 1 ] ;
|
||||
tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
|
||||
if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
|
||||
if $(tag)
|
||||
{
|
||||
tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
|
||||
}
|
||||
@@ -199,7 +201,6 @@ rule init (
|
||||
<include>$(source-path)
|
||||
<toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
|
||||
<toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
|
||||
<link>shared:<define>ZLIB_DLL
|
||||
:
|
||||
: <include>$(source-path) ] ;
|
||||
}
|
||||
@@ -225,6 +226,7 @@ rule init (
|
||||
local mt = [ new ac-library libjpeg : $(.project) : $(condition) :
|
||||
$(include-path) : $(library-path) : $(library-name) : $(root) ] ;
|
||||
$(mt).set-header $(header) ;
|
||||
$(mt).set-header-test $(header-test) ;
|
||||
$(mt).set-default-names $(names) ;
|
||||
targets.main-target-alternative $(mt) ;
|
||||
}
|
||||
|
||||
@@ -14,11 +14,11 @@
|
||||
import project ;
|
||||
import ac ;
|
||||
import errors ;
|
||||
import feature ;
|
||||
import "class" : new ;
|
||||
import targets ;
|
||||
import path ;
|
||||
import modules ;
|
||||
import errors ;
|
||||
import indirect ;
|
||||
import property ;
|
||||
import property-set ;
|
||||
@@ -93,7 +93,10 @@ rule init (
|
||||
|
||||
: is-default ?
|
||||
# Default configurations are only used when libpng
|
||||
# has not yet been configured.
|
||||
# has not yet been configured. This option is
|
||||
# deprecated. A configuration will be treated
|
||||
# as a default when none of <include>, <search>,
|
||||
# <name>, and <source> are present.
|
||||
)
|
||||
{
|
||||
local caller = [ project.current ] ;
|
||||
@@ -107,23 +110,21 @@ rule init (
|
||||
project libpng ;
|
||||
}
|
||||
|
||||
local library-path = [ property.select <search> : $(options) ] ;
|
||||
library-path = $(library-path:G=) ;
|
||||
local include-path = [ property.select <include> : $(options) ] ;
|
||||
include-path = $(include-path:G=) ;
|
||||
local source-path = [ property.select <source> : $(options) ] ;
|
||||
source-path = $(source-path:G=) ;
|
||||
local library-name = [ property.select <name> : $(options) ] ;
|
||||
library-name = $(library-name:G=) ;
|
||||
local tag = [ property.select <tag> : $(options) ] ;
|
||||
tag = $(tag:G=) ;
|
||||
local build-name = [ property.select <build-name> : $(options) ] ;
|
||||
build-name = $(build-name:G=) ;
|
||||
local library-path = [ feature.get-values <search> : $(options) ] ;
|
||||
local include-path = [ feature.get-values <include> : $(options) ] ;
|
||||
local source-path = [ feature.get-values <source> : $(options) ] ;
|
||||
local library-name = [ feature.get-values <name> : $(options) ] ;
|
||||
local tag = [ feature.get-values <tag> : $(options) ] ;
|
||||
local build-name = [ feature.get-values <build-name> : $(options) ] ;
|
||||
|
||||
if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
|
||||
{
|
||||
is-default = true ;
|
||||
}
|
||||
|
||||
condition = [ property-set.create $(requirements) ] ;
|
||||
condition = [ property-set.create [ $(condition).base ] ] ;
|
||||
|
||||
local no-build-from-source ;
|
||||
# Ignore environmental LIBPNG_SOURCE if this initialization
|
||||
# requested to search for a specific pre-built library.
|
||||
if $(library-path) || $(include-path) || $(library-name)
|
||||
@@ -134,13 +135,11 @@ rule init (
|
||||
[ property.select <search> <include> <name> : $(options) ] "and"
|
||||
[ property.select <source> <tag> <build-name> : $(options) ] ;
|
||||
}
|
||||
else
|
||||
{
|
||||
no-build-from-source = true ;
|
||||
}
|
||||
}
|
||||
|
||||
source-path ?= [ modules.peek : LIBPNG_SOURCE ] ;
|
||||
else
|
||||
{
|
||||
source-path ?= [ modules.peek : LIBPNG_SOURCE ] ;
|
||||
}
|
||||
|
||||
if $(.configured.$(condition))
|
||||
{
|
||||
@@ -157,12 +156,12 @@ rule init (
|
||||
}
|
||||
return ;
|
||||
}
|
||||
else if $(source-path) && ! $(no-build-from-source)
|
||||
else if $(source-path)
|
||||
{
|
||||
build-name ?= png ;
|
||||
library-id = [ CALC $(library-id) + 1 ] ;
|
||||
tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
|
||||
if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
|
||||
if $(tag)
|
||||
{
|
||||
tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
|
||||
}
|
||||
|
||||
@@ -14,11 +14,11 @@
|
||||
import project ;
|
||||
import ac ;
|
||||
import errors ;
|
||||
import feature ;
|
||||
import "class" : new ;
|
||||
import targets ;
|
||||
import path ;
|
||||
import modules ;
|
||||
import errors ;
|
||||
import indirect ;
|
||||
import property ;
|
||||
import property-set ;
|
||||
@@ -96,7 +96,10 @@ rule init (
|
||||
|
||||
: is-default ?
|
||||
# Default configurations are only used when libtiff
|
||||
# has not yet been configured.
|
||||
# has not yet been configured. This option is
|
||||
# deprecated. A configuration will be treated
|
||||
# as a default when none of <include>, <search>,
|
||||
# <name>, and <source> are present.
|
||||
)
|
||||
{
|
||||
local caller = [ project.current ] ;
|
||||
@@ -110,24 +113,22 @@ rule init (
|
||||
project libtiff ;
|
||||
}
|
||||
|
||||
local library-path = [ property.select <search> : $(options) ] ;
|
||||
library-path = $(library-path:G=) ;
|
||||
local include-path = [ property.select <include> : $(options) ] ;
|
||||
include-path = $(include-path:G=) ;
|
||||
local source-path = [ property.select <source> : $(options) ] ;
|
||||
source-path = $(source-path:G=) ;
|
||||
local library-name = [ property.select <name> : $(options) ] ;
|
||||
library-name = $(library-name:G=) ;
|
||||
local tag = [ property.select <tag> : $(options) ] ;
|
||||
tag = $(tag:G=) ;
|
||||
local build-name = [ property.select <build-name> : $(options) ] ;
|
||||
build-name = $(build-name:G=) ;
|
||||
local library-path = [ feature.get-values <search> : $(options) ] ;
|
||||
local include-path = [ feature.get-values <include> : $(options) ] ;
|
||||
local source-path = [ feature.get-values <source> : $(options) ] ;
|
||||
local library-name = [ feature.get-values <name> : $(options) ] ;
|
||||
local tag = [ feature.get-values <tag> : $(options) ] ;
|
||||
local build-name = [ feature.get-values <build-name> : $(options) ] ;
|
||||
|
||||
if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
|
||||
{
|
||||
is-default = true ;
|
||||
}
|
||||
|
||||
condition = [ property-set.create $(requirements) ] ;
|
||||
condition = [ property-set.create [ $(condition).base ] ] ;
|
||||
|
||||
local no-build-from-source ;
|
||||
# Ignore environmental ZLIB_SOURCE if this initialization
|
||||
# Ignore environmental LIBTIFF_SOURCE if this initialization
|
||||
# requested to search for a specific pre-built library.
|
||||
if $(library-path) || $(include-path) || $(library-name)
|
||||
{
|
||||
@@ -137,13 +138,11 @@ rule init (
|
||||
[ property.select <search> <include> <name> : $(options) ] "and"
|
||||
[ property.select <source> <tag> <build-name> : $(options) ] ;
|
||||
}
|
||||
else
|
||||
{
|
||||
no-build-from-source = true ;
|
||||
}
|
||||
}
|
||||
|
||||
source-path ?= [ modules.peek : ZLIB_SOURCE ] ;
|
||||
else
|
||||
{
|
||||
source-path ?= [ modules.peek : LIBTIFF_SOURCE ] ;
|
||||
}
|
||||
|
||||
if $(.configured.$(condition))
|
||||
{
|
||||
@@ -160,12 +159,12 @@ rule init (
|
||||
}
|
||||
return ;
|
||||
}
|
||||
else if $(source-path) && ! $(no-build-from-source)
|
||||
else if $(source-path)
|
||||
{
|
||||
build-name ?= tiff ;
|
||||
library-id = [ CALC $(library-id) + 1 ] ;
|
||||
tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
|
||||
if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
|
||||
if $(tag)
|
||||
{
|
||||
tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
|
||||
}
|
||||
@@ -196,7 +195,6 @@ rule init (
|
||||
<include>$(source-path)
|
||||
<toolset>msvc:<define>_CRT_SECURE_NO_DEPRECATE
|
||||
<toolset>msvc:<define>_SCL_SECURE_NO_DEPRECATE
|
||||
<link>shared:<define>ZLIB_DLL
|
||||
:
|
||||
: <include>$(source-path) ] ;
|
||||
}
|
||||
|
||||
@@ -34,7 +34,7 @@ actions touch {
|
||||
$(TOUCH) "$(<)"
|
||||
}
|
||||
|
||||
rule can-symlink ( project : ps )
|
||||
rule can-symlink ( project )
|
||||
{
|
||||
if ! $(.can-symlink)
|
||||
{
|
||||
@@ -45,7 +45,7 @@ rule can-symlink ( project : ps )
|
||||
local target = [ new file-target test-symlink : :
|
||||
$(project) : [ new action $(source-target) : link.mklink ] ] ;
|
||||
|
||||
if [ configure.try-build $(target) : $(ps) : "symlinks supported" ]
|
||||
if [ configure.try-build $(target) : [ property-set.empty ] : "symlinks supported" ]
|
||||
{
|
||||
.can-symlink = true ;
|
||||
}
|
||||
@@ -64,7 +64,7 @@ if [ os.name ] = NT
|
||||
{
|
||||
|
||||
# Test for Windows junctions (mklink /J)
|
||||
rule can-junction ( project : ps )
|
||||
rule can-junction ( project )
|
||||
{
|
||||
if ! $(.can-junction)
|
||||
{
|
||||
@@ -75,7 +75,7 @@ rule can-junction ( project : ps )
|
||||
local target = [ new file-target test-junction : :
|
||||
$(project) : [ new action $(source-target) : link.junction ] ] ;
|
||||
|
||||
if [ configure.try-build $(target) : $(ps) : "junctions supported" ]
|
||||
if [ configure.try-build $(target) : [ property-set.empty ] : "junctions supported" ]
|
||||
{
|
||||
.can-junction = true ;
|
||||
}
|
||||
@@ -96,13 +96,13 @@ else
|
||||
|
||||
.can-junction = false ;
|
||||
|
||||
rule can-junction ( project : ps )
|
||||
rule can-junction ( project )
|
||||
{
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
rule can-hardlink ( project : ps )
|
||||
rule can-hardlink ( project )
|
||||
{
|
||||
if ! $(.can-hardlink)
|
||||
{
|
||||
@@ -119,7 +119,7 @@ rule can-hardlink ( project : ps )
|
||||
: [ new property-set <location-prefix>symlink ]
|
||||
] ] ;
|
||||
|
||||
if [ configure.try-build $(target) : $(ps) : "hardlinks supported" ]
|
||||
if [ configure.try-build $(target) : [ property-set.empty ] : "hardlinks supported" ]
|
||||
{
|
||||
.can-hardlink = true ;
|
||||
}
|
||||
@@ -192,10 +192,10 @@ class symlink-target-class : basic-target
|
||||
|
||||
# If we have symlinks, don't bother checking
|
||||
# for hardlinks and junctions.
|
||||
if ! [ link.can-symlink $(self.project) : $(property-set) ]
|
||||
if ! [ link.can-symlink $(self.project) ]
|
||||
{
|
||||
link.can-junction $(self.project) : $(property-set) ;
|
||||
link.can-hardlink $(self.project) : $(property-set) ;
|
||||
link.can-junction $(self.project) ;
|
||||
link.can-hardlink $(self.project) ;
|
||||
}
|
||||
|
||||
if [ $(property-set).get <location> ]
|
||||
|
||||
@@ -14,11 +14,11 @@
|
||||
import project ;
|
||||
import ac ;
|
||||
import errors ;
|
||||
import feature ;
|
||||
import "class" : new ;
|
||||
import targets ;
|
||||
import path ;
|
||||
import modules ;
|
||||
import errors ;
|
||||
import indirect ;
|
||||
import property ;
|
||||
import property-set ;
|
||||
@@ -62,12 +62,9 @@ rule init (
|
||||
project lzma ;
|
||||
}
|
||||
|
||||
local library-path = [ property.select <search> : $(options) ] ;
|
||||
library-path = $(library-path:G=) ;
|
||||
local include-path = [ property.select <include> : $(options) ] ;
|
||||
include-path = $(include-path:G=) ;
|
||||
local library-name = [ property.select <name> : $(options) ] ;
|
||||
library-name = $(library-name:G=) ;
|
||||
local library-path = [ feature.get-values <search> : $(options) ] ;
|
||||
local include-path = [ feature.get-values <include> : $(options) ] ;
|
||||
local library-name = [ feature.get-values <name> : $(options) ] ;
|
||||
|
||||
if ! $(options)
|
||||
{
|
||||
|
||||
@@ -17,6 +17,8 @@ import targets ;
|
||||
class make-target-class : basic-target
|
||||
{
|
||||
import "class" : new ;
|
||||
import indirect ;
|
||||
import toolset ;
|
||||
import type ;
|
||||
import virtual-target ;
|
||||
|
||||
@@ -34,10 +36,11 @@ class make-target-class : basic-target
|
||||
# below.
|
||||
local m = [ MATCH ^@(.*) : $(action-name) ] ;
|
||||
|
||||
local a = [ new action $(source-targets) : $(m[1]) : $(property-set) ] ;
|
||||
local relevant = [ toolset.relevant [ indirect.get-rule $(m[1]) ] ] ;
|
||||
local a = [ new action $(source-targets) : $(m[1]) : [ $(property-set).add $(relevant) ] ] ;
|
||||
local t = [ new file-target $(self.name) exact : [ type.type
|
||||
$(self.name) ] : $(self.project) : $(a) ] ;
|
||||
return [ property-set.empty ] [ virtual-target.register $(t) ] ;
|
||||
return $(relevant) [ virtual-target.register $(t) ] ;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -595,6 +595,10 @@ feature mpi:processes : : free incidental ;
|
||||
# apply to mpi.capture output at the moment.
|
||||
# Redo this explicitly.
|
||||
toolset.flags mpi.capture-output ARGS <testing.arg> ;
|
||||
toolset.uses-features mpi.capture-output :
|
||||
<testing.launcher> <testing.execute> <dll-path> <xdll-path> <target-os>
|
||||
<mpi:processes> ;
|
||||
|
||||
rule capture-output ( target : sources * : properties * )
|
||||
{
|
||||
# Use the standard capture-output rule to run the tests
|
||||
|
||||
@@ -218,11 +218,14 @@ import midl ;
|
||||
import os ;
|
||||
import path ;
|
||||
import pch ;
|
||||
import project ;
|
||||
import property ;
|
||||
import property-set ;
|
||||
import rc ;
|
||||
import set ;
|
||||
import toolset ;
|
||||
import type ;
|
||||
import virtual-target ;
|
||||
|
||||
|
||||
type.register MANIFEST : manifest ;
|
||||
@@ -325,7 +328,7 @@ rule init (
|
||||
#
|
||||
# <rewrite-setup-scripts>
|
||||
# Whether to rewrite setup scripts. New scripts will be output in
|
||||
# TEMP directory and will be used instead of originals in build actions.
|
||||
# build tree and will be used instead of originals in build actions.
|
||||
# Possible values:
|
||||
# * on - rewrite scripts, if they do not already exist (default)
|
||||
# * always - always rewrite scripts, even if they already exist
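
# Hypothetical user-config.jam sketch that forces the setup scripts to always
# be regenerated (one of the documented values above):
#
#   using msvc : 14.1 : : <rewrite-setup-scripts>always ;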
|
||||
@@ -512,7 +515,7 @@ if [ os.name ] in NT
|
||||
actions archive
|
||||
{
|
||||
if exist "$(<[1])" DEL "$(<[1])"
|
||||
$(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
$(.SETUP) $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -520,10 +523,14 @@ else
|
||||
actions archive
|
||||
{
|
||||
$(.RM) "$(<[1])"
|
||||
$(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
$(.SETUP) $(.LD) $(AROPTIONS) /out:"$(<[1])" @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
}
|
||||
}
|
||||
|
||||
rule compile.asm ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
}
|
||||
|
||||
# For the assembler the following options are turned on by default:
|
||||
#
|
||||
@@ -533,12 +540,13 @@ else
|
||||
#
|
||||
actions compile.asm
|
||||
{
|
||||
$(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"
|
||||
$(.SETUP) $(.ASM) -c -Zp4 -Cp -Cx -D$(DEFINES) $(ASMFLAGS) $(USER_ASMFLAGS) -Fo "$(<:W)" "$(>:W)"
|
||||
}
|
||||
|
||||
|
||||
rule compile.c ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
C++FLAGS on $(targets[1]) = ;
|
||||
get-rspline $(targets) : -TC ;
|
||||
compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
|
||||
@@ -547,6 +555,7 @@ rule compile.c ( targets + : sources * : properties * )
|
||||
|
||||
rule compile.c.preprocess ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
C++FLAGS on $(targets[1]) = ;
|
||||
get-rspline $(targets) : -TC ;
|
||||
preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
|
||||
@@ -555,6 +564,7 @@ rule compile.c.preprocess ( targets + : sources * : properties * )
|
||||
|
||||
rule compile.c.pch ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
C++FLAGS on $(targets[1]) = ;
|
||||
get-rspline $(targets[1]) : -TC ;
|
||||
get-rspline $(targets[2]) : -TC ;
|
||||
@@ -592,12 +602,12 @@ toolset.flags msvc YLOPTION : "-Yl" ;
|
||||
#
|
||||
actions compile-c-c++ bind PDB_NAME
|
||||
{
|
||||
$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
|
||||
$(.SETUP) $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -Fo"$(<[1]:W)" $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
|
||||
}
|
||||
|
||||
actions preprocess-c-c++ bind PDB_NAME
|
||||
{
|
||||
$(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"
|
||||
$(.SETUP) $(.CC) @"@($(<[1]:W).rsp:E="$(>[1]:W)" -E $(PDB_CFLAG)"$(PDB_NAME)" -Yu"$(>[3]:D=)" -Fp"$(>[2]:W)" $(CC_RSPLINE))" >"$(<[1]:W)"
|
||||
}
|
||||
|
||||
rule compile-c-c++ ( targets + : sources * )
|
||||
@@ -624,7 +634,7 @@ rule preprocess-c-c++ ( targets + : sources * )
|
||||
# syntax highlighting in the messy N-quoted code below.
|
||||
actions compile-c-c++-pch
|
||||
{
|
||||
$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)
|
||||
$(.SETUP) $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" "@($(<[1]:W).cpp:E=#include $(.escaped-double-quote)$(>[1]:D=)$(.escaped-double-quote)$(.nl))" $(.CC.FILTER)
|
||||
}
|
||||
|
||||
|
||||
@@ -633,18 +643,20 @@ actions compile-c-c++-pch
|
||||
# given as one of the source parameters.
|
||||
actions compile-c-c++-pch-s
|
||||
{
|
||||
$(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
|
||||
$(.SETUP) $(.CC) @"@($(<[1]:W).rsp:E="$(>[2]:W)" -Fo"$(<[2]:W)" -Yc"$(>[1]:D=)" $(YLOPTION)"__bjam_pch_symbol_$(>[1]:D=)" -Fp"$(<[1]:W)" $(CC_RSPLINE))" $(.CC.FILTER)
|
||||
}
|
||||
|
||||
|
||||
rule compile.c++ ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
get-rspline $(targets) : -TP ;
|
||||
compile-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
|
||||
}
|
||||
|
||||
rule compile.c++.preprocess ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
get-rspline $(targets) : -TP ;
|
||||
preprocess-c-c++ $(<) : $(>) [ on $(<) return $(PCH_FILE) ] [ on $(<) return $(PCH_HEADER) ] ;
|
||||
}
|
||||
@@ -652,6 +664,7 @@ rule compile.c++.preprocess ( targets + : sources * : properties * )
|
||||
|
||||
rule compile.c++.pch ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
get-rspline $(targets[1]) : -TP ;
|
||||
get-rspline $(targets[2]) : -TP ;
|
||||
local pch-source = [ on $(<) return $(PCH_SOURCE) ] ;
|
||||
@@ -666,31 +679,46 @@ rule compile.c++.pch ( targets + : sources * : properties * )
|
||||
}
|
||||
}
|
||||
|
||||
rule compile.idl ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
}
|
||||
|
||||
# See midl.jam for details.
|
||||
#
|
||||
actions compile.idl
|
||||
{
|
||||
$(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")"
|
||||
$(.SETUP) $(.IDL) /nologo @"@($(<[1]:W).rsp:E=$(.nl)"$(>:W)" $(.nl)-D$(DEFINES) $(.nl)"-I$(INCLUDES:W)" $(.nl)-U$(UNDEFS) $(.nl)$(MIDLFLAGS) $(.nl)/tlb "$(<[1]:W)" $(.nl)/h "$(<[2]:W)" $(.nl)/iid "$(<[3]:W)" $(.nl)/proxy "$(<[4]:W)" $(.nl)/dlldata "$(<[5]:W)")"
|
||||
$(.TOUCH_FILE) "$(<[4]:W)"
|
||||
$(.TOUCH_FILE) "$(<[5]:W)"
|
||||
}
|
||||
|
||||
rule compile.mc ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
}
|
||||
|
||||
actions compile.mc
|
||||
{
|
||||
$(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
|
||||
$(.SETUP) $(.MC) $(MCFLAGS) -h "$(<[1]:DW)" -r "$(<[2]:DW)" "$(>:W)"
|
||||
}
|
||||
|
||||
|
||||
rule compile.rc ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
}
|
||||
|
||||
actions compile.rc
|
||||
{
|
||||
$(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"
|
||||
$(.SETUP) $(.RC) -l 0x409 -U$(UNDEFS) -D$(DEFINES) -I"$(INCLUDES:W)" -fo "$(<:W)" "$(>:W)"
|
||||
}
|
||||
|
||||
toolset.uses-features msvc.link : <embed-manifest> <embed-manifest-file> ;
|
||||
|
||||
rule link ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
if <embed-manifest>on in $(properties)
|
||||
{
|
||||
if [ feature.get-values <embed-manifest-file> : $(properties) ]
|
||||
@@ -707,6 +735,7 @@ rule link ( targets + : sources * : properties * )
|
||||
|
||||
rule link.dll ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
DEPENDS $(<) : [ on $(<) return $(DEF_FILE) ] ;
|
||||
if <embed-manifest>on in $(properties)
|
||||
{
|
||||
@@ -736,44 +765,44 @@ if [ os.name ] in NT
|
||||
{
|
||||
actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
|
||||
{
|
||||
$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
$(.SETUP) $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
|
||||
}
|
||||
|
||||
actions manifest
|
||||
{
|
||||
if exist "$(<[1]).manifest" (
|
||||
$(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
|
||||
$(.SETUP) $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);1"
|
||||
)
|
||||
}
|
||||
|
||||
actions manifest.user bind EMBED_MANIFEST_FILE
|
||||
{
|
||||
$(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);1"
|
||||
$(.SETUP) $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);1"
|
||||
}
|
||||
|
||||
actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
|
||||
{
|
||||
$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
$(.SETUP) $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
if %ERRORLEVEL% NEQ 0 EXIT %ERRORLEVEL%
|
||||
}
|
||||
|
||||
actions manifest.dll
|
||||
{
|
||||
if exist "$(<[1]).manifest" (
|
||||
$(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
|
||||
$(.SETUP) $(.MT) -manifest "$(<[1]).manifest" "-outputresource:$(<[1]);2"
|
||||
)
|
||||
}
|
||||
actions manifest.dll.user bind EMBED_MANIFEST_FILE
|
||||
{
|
||||
$(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2"
|
||||
$(.SETUP) $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2"
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
actions link bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
|
||||
{
|
||||
$(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
$(.SETUP) $(.LD) $(LINKFLAGS) /out:"$(<[1]:W)" /LIBPATH:"$(LINKPATH:W)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
}
|
||||
|
||||
actions manifest
|
||||
@@ -785,19 +814,19 @@ else
|
||||
|
||||
actions link.dll bind DEF_FILE LIBRARIES_MENTIONED_BY_FILE
|
||||
{
|
||||
$(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
$(.SETUP) $(.LD) /DLL $(LINKFLAGS) /out:"$(<[1]:W)" /IMPLIB:"$(<[2]:W)" /LIBPATH:"$(LINKPATH:W)" /def:"$(DEF_FILE)" $(OPTIONS) @"@($(<[1]:W).rsp:E=$(.nl)"$(>)" $(.nl)$(LIBRARIES_MENTIONED_BY_FILE) $(.nl)$(LIBRARIES) $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_ST).lib" $(.nl)"$(LIBRARY_OPTION)$(FINDLIBS_SA).lib")"
|
||||
}
|
||||
|
||||
actions manifest.dll
|
||||
{
|
||||
if test -e "$(<[1]).manifest"; then
|
||||
$(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2"
|
||||
$(.SETUP) $(.MT) -manifest "$(<[1]:W).manifest" "-outputresource:$(<[1]:W);2"
|
||||
fi
|
||||
}
|
||||
|
||||
actions manifest.dll.user bind EMBED_MANIFEST_FILE
|
||||
{
|
||||
$(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2"
|
||||
$(.SETUP) $(.MT) -manifest "$(EMBED_MANIFEST_FILE)" "-outputresource:$(<[1]);2"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -810,6 +839,7 @@ else
|
||||
#
|
||||
rule archive ( targets + : sources * : properties * )
|
||||
{
|
||||
set-setup-command $(targets) : $(properties) ;
|
||||
PDB_NAME on $(>) = $(<[1]:S=.pdb) ;
|
||||
LOCATE on $(<[1]:S=.pdb) = [ on $(<[1]) return $(LOCATE) ] ;
|
||||
}
|
||||
@@ -864,7 +894,7 @@ class msvc-pch-generator : pch-generator
|
||||
: $(pch-header) ] ;
|
||||
|
||||
local pch-file ;
|
||||
for local g in $(generated)
|
||||
for local g in $(generated[2-])
|
||||
{
|
||||
if [ type.is-derived [ $(g).type ] PCH ]
|
||||
{
|
||||
@@ -872,8 +902,8 @@ class msvc-pch-generator : pch-generator
|
||||
}
|
||||
}
|
||||
|
||||
return [ property-set.create <pch-header>$(pch-header)
|
||||
<pch-file>$(pch-file) ] $(generated) ;
|
||||
return [ $(generated[1]).add-raw <pch-header>$(pch-header)
|
||||
<pch-file>$(pch-file) ] $(generated[2-]) ;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -924,72 +954,34 @@ local rule auto-detect-toolset-versions ( )
|
||||
}
|
||||
}
|
||||
|
||||
# Helper rule to generate a faster alternative to MSVC setup scripts.
|
||||
# We used to call MSVC setup scripts directly in every action, however in
|
||||
# newer MSVC versions (10.0+) they make long-lasting registry queries
|
||||
# which have a significant impact on build time.
|
||||
rule maybe-rewrite-setup ( toolset : setup-script : setup-options : version : rewrite-setup ? )
|
||||
{
|
||||
local result = $(setup-script)" "$(setup-options) ;
|
||||
# At the moment we only know how to rewrite scripts with cmd shell.
|
||||
if ( [ os.name ] in NT ) && ( $(rewrite-setup) != off )
|
||||
{
|
||||
setup-script-id = b2_$(toolset)_$(version)_$(setup-script:B) ;
|
||||
if $(setup-options)-is-not-empty
|
||||
{
|
||||
setup-script-id = $(setup-script-id)_$(setup-options) ;
|
||||
}
|
||||
|
||||
if $(.$(setup-script-id))
|
||||
{
|
||||
errors.error rewriting setup script for the second time ;
|
||||
}
|
||||
|
||||
local tmpdir = [ os.environ TEMP ] ;
|
||||
local replacement = [ path.native $(tmpdir)/$(setup-script-id).cmd ] ;
|
||||
if ( $(rewrite-setup) = always ) || ( ! [ path.exists $(replacement) ] )
|
||||
{
|
||||
local original-vars = [ SPLIT_BY_CHARACTERS [ SHELL set ] : "\n" ] ;
|
||||
local new-vars = [ SPLIT_BY_CHARACTERS [ SHELL "$(setup-script) $(setup-options)>nul && set" ] : "\n" ] ;
|
||||
local diff-vars = [ set.difference $(new-vars) : $(original-vars) ] ;
|
||||
if $(diff-vars)
|
||||
{
|
||||
local target = <new-setup-script>$(replacement) ;
|
||||
FILE_CONTENTS on $(target) = "SET "$(diff-vars) ;
|
||||
ALWAYS $(target) ;
|
||||
msvc.write-setup-script $(target) ;
|
||||
UPDATE_NOW $(target) : : ignore-minus-n ;
|
||||
.$(setup-script-id) = $(replacement) ;
|
||||
result = "\""$(replacement)"\"" ;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
result = "\""$(replacement)"\"" ;
|
||||
}
|
||||
}
|
||||
return $(result) ;
|
||||
}
|
||||
|
||||
actions write-setup-script
|
||||
{
|
||||
@($(STDOUT):E=$(FILE_CONTENTS:J=$(.nl))) > "$(<)"
|
||||
}
|
||||
|
||||
if [ os.name ] = NT
|
||||
{
|
||||
local rule call-batch-script ( command )
|
||||
{
|
||||
return "call $(command) >nul$(.nl)" ;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
# On cygwin, we need to run both the batch script
|
||||
# and the following command in the same instance
|
||||
# of cmd.exe.
|
||||
local rule call-batch-script ( command )
|
||||
{
|
||||
return "cmd.exe /S /C call $(command) \">nul\" \"&&\" " ;
|
||||
}
|
||||
}
|
||||
|
||||
# Local helper rule to create the vcvars setup command for given architecture
|
||||
# and options.
|
||||
#
|
||||
local rule generate-setup-cmd ( version : command : parent : options * : cpu : global-setup ? : default-global-setup-options : default-setup )
|
||||
{
|
||||
local setup-prefix = "call " ;
|
||||
local setup-suffix = " >nul"$(.nl) ;
|
||||
if ! [ os.name ] in NT
|
||||
{
|
||||
setup-prefix = "cmd.exe /S /C call " ;
|
||||
setup-suffix = " \">nul\" \"&&\" " ;
|
||||
}
|
||||
|
||||
local setup-options ;
|
||||
local setup = [ feature.get-values <setup-$(cpu)> : $(options) ] ;
|
||||
|
||||
@@ -1020,19 +1012,81 @@ local rule generate-setup-cmd ( version : command : parent : options * : cpu : g
|
||||
}
|
||||
}
|
||||
|
||||
# Cygwin to Windows path translation.
|
||||
setup = "\""$(setup:W)"\"" ;
|
||||
|
||||
# Append setup options to the setup name and add the final setup
|
||||
# prefix & suffix.
|
||||
setup-options ?= "" ;
|
||||
local rewrite = [ feature.get-values <rewrite-setup-scripts> : $(options) ] ;
|
||||
setup = [ maybe-rewrite-setup msvc : $(setup:J=" ") : $(setup-options:J=" ") : $(version) : $(rewrite) ] ;
|
||||
setup = $(setup-prefix)$(setup)$(setup-suffix) ;
|
||||
|
||||
return $(setup) ;
|
||||
return $(setup) "$(setup-options:J= )" ;
|
||||
}
|
||||
|
||||
# Worker for set-setup-command. Usable in a virtual-target.action.
|
||||
rule adjust-setup-command ( new-setup : setup : properties * )
|
||||
{
|
||||
local internal = $(new-setup:S=.read) ;
|
||||
NOTFILE $(internal) ;
|
||||
local setup-options = [ property.select <msvc.setup-options> : $(properties) ] ;
|
||||
setup-options = $(setup-options:G=:E=) ;
|
||||
DEPENDS $(internal) : $(setup) ;
|
||||
DEPENDS $(new-setup) : $(internal) ;
|
||||
REBUILDS $(new-setup) : $(internal) ;
|
||||
msvc.read-setup $(internal) : $(setup) ;
|
||||
msvc.write-setup-script $(new-setup) : $(setup) ;
|
||||
__ACTION_RULE__ on $(internal) = msvc.rewrite-setup $(setup) $(setup-options) $(new-setup) ;
|
||||
}
|
||||
|
||||
# This doesn't actually do anything. It's merely
|
||||
# used as a trigger for __ACTION_RULE__.
|
||||
actions quietly read-setup { }
|
||||
|
||||
# Calculates the changes made to the environment by the setup script.
# Should be used as a callback for __ACTION_RULE__.
|
||||
local rule rewrite-setup ( setup-script setup-options new-setup : target : * )
|
||||
{
|
||||
local setup-path = [ on $(setup-script) return $(LOCATE) $(SEARCH) ] ;
|
||||
setup-path = $(setup-path[1]) ;
|
||||
local command = "\"$(setup-script:G=:R=$(setup-path))\" $(setup-options)" ;
|
||||
local original-vars = [ SPLIT_BY_CHARACTERS [ SHELL set ] : "\n" ] ;
|
||||
local new-vars = [ SPLIT_BY_CHARACTERS [ SHELL "$(command) >nul && set" ] : "\n" ] ;
|
||||
local diff-vars = [ set.difference $(new-vars) : $(original-vars) ] ;
|
||||
if $(diff-vars)
|
||||
{
|
||||
FILE_CONTENTS on $(new-setup) = "REM $(command)" "SET "$(diff-vars) ;
|
||||
}
|
||||
}
|
||||
|
||||
IMPORT msvc : rewrite-setup : : msvc.rewrite-setup ;
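For readers who want the environment-diff idea in isolation, the sketch below (illustrative only, not part of this patch) mirrors what msvc.rewrite-setup above does with SHELL and set.difference: run "set", run the setup script followed by "set", and keep only the variables the script added or changed.

    # Windows-only sketch: capture what a vcvars-style setup script adds to the
    # cmd.exe environment and express it as plain SET commands.
    import subprocess

    def environment_diff(setup_script, setup_options=""):
        before = set(subprocess.check_output("set", shell=True).splitlines())
        command = '"%s" %s >nul && set' % (setup_script, setup_options)
        after = set(subprocess.check_output(command, shell=True).splitlines())
        return ["SET " + line for line in sorted(after - before)]

Writing these SET lines into a small generated batch file and calling that file from build actions is what avoids re-running the slow vcvars registry queries on every command.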
|
||||
|
||||
# Helper rule to generate a faster alternative to MSVC setup scripts.
|
||||
# We used to call MSVC setup scripts directly in every action; however, in
# newer MSVC versions (10.0+) they make long-lasting registry queries,
# which have a significant impact on build time.
|
||||
local rule set-setup-command ( targets * : properties * )
|
||||
{
|
||||
if ! [ on $(targets) return $(.SETUP) ]
|
||||
{
|
||||
local setup-script = [ on $(targets) return $(.SETUP-SCRIPT) ] ;
|
||||
local setup-options = [ on $(targets) return $(.SETUP-OPTIONS) ] ;
|
||||
local key = .setup-command-$(setup-script:E=)-$(setup-options:E=) ;
|
||||
if ! $($(key))
|
||||
{
|
||||
properties = [ feature.expand $(properties) ] ;
|
||||
properties = [ property.select <toolset> <toolset-msvc:version> <architecture> <address-model> <windows-api> <relevant> : $(properties) ] ;
|
||||
local ps = [ property-set.create $(properties) <msvc.setup-options>$(setup-options) ] ;
|
||||
local original = [ virtual-target.from-file $(setup-script) : [ path.pwd ] : $(.project) ] ;
|
||||
local action = [ new non-scanning-action $(original) : msvc.adjust-setup-command : $(ps) ] ;
|
||||
local new-setup = [ virtual-target.register [ new file-target msvc-setup.bat exact : : $(.project) : $(action) ] ] ;
|
||||
local command = [ $(new-setup).actualize ] ;
|
||||
local path = [ on $(command) return $(LOCATE) ] ;
|
||||
local block-update = $(command:S=.nup) ;
|
||||
NOUPDATE $(block-update) ;
|
||||
NOTFILE $(block-update) ;
|
||||
DEPENDS $(block-update) : $(command) ;
|
||||
if [ on $(targets) return $(.REWRITE-SETUP) ]
|
||||
{
|
||||
ALWAYS $(command) ;
|
||||
}
|
||||
$(key) = [ call-batch-script "\"$(command:WG=:R=$(path))\" $(setup-options:E=)" ] $(block-update) ;
|
||||
}
|
||||
DEPENDS $(targets) : $($(key)[2]) ;
|
||||
.SETUP on $(targets) = $($(key)[1]) ;
|
||||
}
|
||||
}
|
||||
|
||||
# Worker rule for toolset version configuration. Takes an explicit version id or
|
||||
# nothing in case it should configure the default toolset version (the first
|
||||
@@ -1376,29 +1430,49 @@ local rule configure-really ( version ? : options * )
|
||||
local cpu-assembler = $(assembler) ;
|
||||
cpu-assembler ?= $(default-assembler-$(c)) ;
|
||||
|
||||
toolset.flags msvc.compile .RC <windows-api>$(api)/$(cpu-conditions) : $(setup-$(c))$(resource-compiler) ;
|
||||
toolset.flags msvc.compile .IDL <windows-api>$(api)/$(cpu-conditions) : $(setup-$(c))$(idl-compiler) ;
|
||||
toolset.flags msvc.compile .MC <windows-api>$(api)/$(cpu-conditions) : $(setup-$(c))$(mc-compiler) ;
|
||||
toolset.flags msvc.link .MT <windows-api>$(api)/$(cpu-conditions) : $(setup-$(c))$(manifest-tool) -nologo ;
|
||||
|
||||
for api in desktop store phone
|
||||
for local api in desktop store phone
|
||||
{
|
||||
local setup-script = $(setup-$(c)) ;
|
||||
if $(api) = phone
|
||||
{
|
||||
setup-script = $(setup-phone-$(c)) ;
|
||||
}
|
||||
if $(api) = desktop
|
||||
|
||||
if <rewrite-setup-scripts>always in $(options)
|
||||
{
|
||||
toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 -nologo ;
|
||||
toolset.flags msvc .REWRITE-SETUP <windows-api>$(api)/$(cpu-conditions) : true ;
|
||||
}
|
||||
|
||||
if ! $(setup-script)
|
||||
{
|
||||
# Should we try to set up some error handling or fallbacks here?
|
||||
}
|
||||
else if <rewrite-setup-scripts>off in $(options) || [ os.name ] != NT
|
||||
{
|
||||
toolset.flags msvc .SETUP <windows-api>$(api)/$(cpu-conditions) : [ call-batch-script "\"$(setup-script[1]:W)\" $(setup-script[2-]:E=)" ] ;
|
||||
}
|
||||
else
|
||||
{
|
||||
toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(compiler) /Zm800 /ZW /EHsc -nologo ;
|
||||
toolset.flags msvc .SETUP-SCRIPT <windows-api>$(api)/$(cpu-conditions) : $(setup-script[1]) ;
|
||||
toolset.flags msvc .SETUP-OPTIONS <windows-api>$(api)/$(cpu-conditions) : $(setup-script[2-]) ;
|
||||
}
|
||||
toolset.flags msvc.compile .ASM <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(cpu-assembler) -nologo ;
|
||||
toolset.flags msvc.link .LD <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(linker) /NOLOGO /INCREMENTAL:NO ;
|
||||
toolset.flags msvc.archive .LD <windows-api>$(api)/$(cpu-conditions) : $(setup-script)$(linker) /lib /NOLOGO ;
|
||||
|
||||
toolset.flags msvc.compile .RC <windows-api>$(api)/$(cpu-conditions) : $(resource-compiler) ;
|
||||
toolset.flags msvc.compile .IDL <windows-api>$(api)/$(cpu-conditions) : $(idl-compiler) ;
|
||||
toolset.flags msvc.compile .MC <windows-api>$(api)/$(cpu-conditions) : $(mc-compiler) ;
|
||||
toolset.flags msvc.link .MT <windows-api>$(api)/$(cpu-conditions) : $(manifest-tool) -nologo ;
|
||||
|
||||
if $(api) = desktop
|
||||
{
|
||||
toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(compiler) /Zm800 -nologo ;
|
||||
}
|
||||
else
|
||||
{
|
||||
toolset.flags msvc.compile .CC <windows-api>$(api)/$(cpu-conditions) : $(compiler) /Zm800 /ZW /EHsc -nologo ;
|
||||
}
|
||||
toolset.flags msvc.compile .ASM <windows-api>$(api)/$(cpu-conditions) : $(cpu-assembler) -nologo ;
|
||||
toolset.flags msvc.link .LD <windows-api>$(api)/$(cpu-conditions) : $(linker) /NOLOGO /INCREMENTAL:NO ;
|
||||
toolset.flags msvc.archive .LD <windows-api>$(api)/$(cpu-conditions) : $(linker) /lib /NOLOGO ;
|
||||
}
|
||||
|
||||
if $(cc-filter)
|
||||
@@ -1536,8 +1610,8 @@ class msvc-linking-generator : linking-generator
|
||||
|
||||
if $(result)
|
||||
{
|
||||
local name-main = [ $(result[0]).name ] ;
|
||||
local action = [ $(result[0]).action ] ;
|
||||
local name-main = [ $(result[1]).name ] ;
|
||||
local action = [ $(result[1]).action ] ;
|
||||
|
||||
if [ $(property-set).get <debug-symbols> ] = "on"
|
||||
{
|
||||
@@ -1742,6 +1816,13 @@ local rule register-toolset-really ( )
|
||||
}
|
||||
|
||||
toolset.flags msvc.archive AROPTIONS <archiveflags> ;
|
||||
|
||||
# Create a project to allow building the setup scripts
|
||||
project.initialize $(__name__) ;
|
||||
.project = [ project.current ] ;
|
||||
project msvc ;
|
||||
|
||||
feature.feature msvc.setup-options : : free ;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -40,8 +40,6 @@ rule init ( version ? : command * : options * )
|
||||
# set link flags
|
||||
flags pgi.link FINDLIBS-ST : [
|
||||
feature.get-values <find-static-library> : $(options) ] : unchecked ;
|
||||
|
||||
gcc.init-link-flags pgi gnu $(condition) ;
|
||||
}
|
||||
|
||||
# Declare generators
|
||||
|
||||
@@ -1243,6 +1243,10 @@ local rule pyd-pythonpath ( source )
|
||||
toolset.flags python.capture-output ARGS <testing.arg> ;
|
||||
toolset.flags python.capture-output INPUT_FILES <testing.input-file> ;
|
||||
|
||||
toolset.uses-features python.capture-output :
|
||||
<testing.launcher> <testing.execute> <dll-path> <xdll-path> <target-os>
|
||||
<pythonpath> ;
|
||||
|
||||
rule capture-output ( target : sources * : properties * )
|
||||
{
|
||||
# Setup up a proper DLL search path. Here, $(sources[1]) is a python module
|
||||
@@ -1255,10 +1259,7 @@ rule capture-output ( target : sources * : properties * )
|
||||
PYTHONPATH += [ feature.get-values pythonpath : $(properties) ] ;
|
||||
|
||||
# After test is run, we remove the Python module, but not the Python script.
|
||||
local targets-to-remove = $(sources[2-]) ;
|
||||
targets-to-remove ?= none ;
|
||||
testing.capture-output $(target) : $(sources[1]) : $(properties) :
|
||||
$(targets-to-remove) ;
|
||||
testing.capture-output $(target) : $(sources[1]) : $(properties) ;
|
||||
|
||||
# PYTHONPATH is different; it will be interpreted by whichever Python is
|
||||
# invoked and so must follow path rules for the target os. The only OSes
|
||||
@@ -1275,7 +1276,7 @@ rule capture-output ( target : sources * : properties * )
|
||||
}
|
||||
local path-separator = [ os.path-separator [ translate-os $(target-os) ] ] ;
|
||||
local set-PYTHONPATH = [ common.variable-setting-command PYTHONPATH :
|
||||
$(PYTHONPATH:J=$(path-separator)) ] ;
|
||||
$(PYTHONPATH:E=:J=$(path-separator)) ] ;
|
||||
LAUNCHER on $(target) = $(set-PYTHONPATH) [ on $(target) return \"$(PYTHON)\" ] ;
|
||||
}
|
||||
|
||||
|
||||
@@ -21,7 +21,6 @@ import unix ;
|
||||
feature.extend toolset : qcc ;
|
||||
|
||||
toolset.inherit-generators qcc : unix : unix.link unix.link.dll ;
|
||||
generators.override builtin.lib-generator : qcc.prebuilt ;
|
||||
toolset.inherit-flags qcc : unix ;
|
||||
toolset.inherit-rules qcc : unix ;
|
||||
|
||||
@@ -101,7 +100,7 @@ actions compile.c++
|
||||
|
||||
actions compile.c
|
||||
{
|
||||
"$(CONFIG_COMMAND)" $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
|
||||
"$(CONFIG_COMMAND)" -lang-c $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"
|
||||
}
|
||||
|
||||
actions compile.asm
|
||||
|
||||
@@ -89,6 +89,9 @@ class stlport-target-class : basic-target
|
||||
|
||||
local requirements ;
|
||||
requirements += <stdlib-stlport:version>$(self.version) ;
|
||||
requirements += <relevant>runtime-debugging ;
|
||||
requirements += <relevant>toolset ;
|
||||
requirements += <relevant>runtime-link ;
|
||||
self.requirements = [ property-set.create $(requirements) ] ;
|
||||
}
|
||||
|
||||
|
||||
@@ -34,6 +34,7 @@
|
||||
|
||||
|
||||
import alias ;
|
||||
import build-system ;
|
||||
import "class" ;
|
||||
import common ;
|
||||
import errors ;
|
||||
@@ -213,6 +214,11 @@ rule dump-tests
|
||||
}
|
||||
}
|
||||
|
||||
if ( --dump-tests in [ modules.peek : ARGV ] )
|
||||
{
|
||||
IMPORT testing : dump-tests : : testing.dump-tests ;
|
||||
build-system.add-pre-build-hook testing.dump-tests ;
|
||||
}
|
||||
|
||||
# Given a project location in normalized form (slashes are forward), compute the
|
||||
# name of the Boost library.
|
||||
@@ -325,6 +331,8 @@ generators.register-standard testing.capture-output : EXE : RUN_OUTPUT ;
|
||||
# http://article.gmane.org/gmane.comp.lib.boost.build/6353).
|
||||
generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
|
||||
|
||||
toolset.uses-features testing.expect-success : <preserve-test-targets> ;
|
||||
toolset.uses-features testing.expect-failure : <preserve-test-targets> ;
|
||||
|
||||
# The action rules called by generators.
|
||||
|
||||
@@ -333,7 +341,7 @@ generators.register-standard testing.unit-test : EXE : UNIT_TEST ;
|
||||
#
|
||||
rule expect-success ( target : dependency + : requirements * )
|
||||
{
|
||||
**passed** $(target) : $(dependency) ;
|
||||
**passed** $(target) : $(dependency) : $(requirements) ;
|
||||
}
|
||||
|
||||
|
||||
@@ -350,25 +358,18 @@ rule expect-failure ( target : dependency + : properties * )
|
||||
RMOLD $(marker) ;
|
||||
DEPENDS $(marker) : $(dependency) ;
|
||||
DEPENDS $(target) : $(marker) ;
|
||||
**passed** $(target) : $(marker) ;
|
||||
**passed** $(target) : $(marker) : $(properties) ;
|
||||
}
|
||||
|
||||
|
||||
# The rule/action combination used to report successful passing of a test.
|
||||
#
|
||||
rule **passed**
|
||||
rule **passed** ( target : sources * : properties * )
|
||||
{
|
||||
remove-test-targets $(<) ;
|
||||
|
||||
# Dump all the tests, if needed. We do it here, since dump should happen
|
||||
# only after all Jamfiles have been read, and there is no such place
|
||||
# currently defined (but there should be).
|
||||
if ! $(.dumped-tests) && ( --dump-tests in [ modules.peek : ARGV ] )
|
||||
if [ feature.get-values preserve-test-targets : $(properties) ] = off
|
||||
{
|
||||
.dumped-tests = true ;
|
||||
dump-tests ;
|
||||
remove-test-targets $(<) ;
|
||||
}
|
||||
|
||||
# Force deletion of the target, in case any dependencies failed to build.
|
||||
RMOLD $(<) ;
|
||||
}
|
||||
@@ -457,21 +458,20 @@ toolset.flags testing.capture-output ARGS <testing.arg> ;
|
||||
toolset.flags testing.capture-output INPUT_FILES <testing.input-file> ;
|
||||
toolset.flags testing.capture-output LAUNCHER <testing.launcher> ;
|
||||
|
||||
.preserve-test-targets = on ;
|
||||
toolset.uses-features testing.capture-output :
|
||||
<testing.launcher> <testing.execute> <dll-path> <xdll-path> <target-os> ;
|
||||
|
||||
if --remove-test-targets in [ modules.peek : ARGV ]
|
||||
{
|
||||
.preserve-test-targets = off ;
|
||||
feature.set-default preserve-test-targets : off ;
|
||||
}
|
||||
|
||||
|
||||
# Runs executable 'sources' and stores stdout in file 'target'. Unless
|
||||
# --preserve-test-targets command line option has been specified, removes the
|
||||
# executable. The 'target-to-remove' parameter controls what should be removed:
|
||||
# - if 'none', does not remove anything, ever
|
||||
# - if empty, removes 'source'
|
||||
# - if non-empty and not 'none', contains a list of sources to remove.
|
||||
# executable.
|
||||
#
|
||||
rule capture-output ( target : source : properties * : targets-to-remove * )
|
||||
rule capture-output ( target : source : properties * )
|
||||
{
|
||||
output-file on $(target) = $(target:S=.output) ;
|
||||
LOCATE on $(target:S=.output) = [ on $(target) return $(LOCATE) ] ;
|
||||
@@ -489,15 +489,6 @@ rule capture-output ( target : source : properties * : targets-to-remove * )
|
||||
# bug).
|
||||
DEPENDS $(target) : [ on $(target) return $(INPUT_FILES) ] ;
|
||||
|
||||
if $(targets-to-remove) = none
|
||||
{
|
||||
targets-to-remove = ;
|
||||
}
|
||||
else if ! $(targets-to-remove)
|
||||
{
|
||||
targets-to-remove = $(source) ;
|
||||
}
|
||||
|
||||
run-path-setup $(target) : $(source) : $(properties) ;
|
||||
|
||||
DISABLE_TEST_EXECUTION on $(target) = 0 ;
|
||||
@@ -506,16 +497,6 @@ rule capture-output ( target : source : properties * : targets-to-remove * )
|
||||
DISABLE_TEST_EXECUTION on $(target) = 1 ;
|
||||
}
|
||||
|
||||
if [ feature.get-values preserve-test-targets : $(properties) ] = off
|
||||
|| $(.preserve-test-targets) = off
|
||||
{
|
||||
rmtemp-sources $(target) : $(targets-to-remove) ;
|
||||
for local to-remove in $(targets-to-remove)
|
||||
{
|
||||
rmtemp-all-sources $(to-remove) ;
|
||||
}
|
||||
}
|
||||
|
||||
if ! [ feature.get-values testing.launcher : $(properties) ]
|
||||
{
|
||||
## On VMS set default launcher to MCR
|
||||
@@ -525,42 +506,19 @@ rule capture-output ( target : source : properties * : targets-to-remove * )
|
||||
|
||||
.types-to-remove = EXE OBJ ;
|
||||
|
||||
local rule remove-test-targets ( targets + )
|
||||
local rule remove-test-targets ( target )
|
||||
{
|
||||
if $(.preserve-test-targets) = off
|
||||
{
|
||||
rmtemp-all-sources $(target) ;
|
||||
}
|
||||
}
|
||||
|
||||
local rule rmtemp-all-sources ( target )
|
||||
{
|
||||
local sources ;
|
||||
local action = [ on $(target) return $(.action) ] ;
|
||||
if $(action)
|
||||
local associated-targets = [ virtual-target.traverse [ $(action).targets ] ] ;
|
||||
local targets-to-remove ;
|
||||
for local t in [ sequence.unique $(associated-targets) ]
|
||||
{
|
||||
local action-sources = [ $(action).sources ] ;
|
||||
for local source in $(action-sources)
|
||||
if [ $(t).type ] in $(.types-to-remove)
|
||||
{
|
||||
local source-type = [ $(source).type ] ;
|
||||
if $(source-type) in $(.types-to-remove)
|
||||
{
|
||||
sources += [ $(source).actual-name ] ;
|
||||
}
|
||||
else
|
||||
{
|
||||
# ECHO IGNORED: $(source) :: $(source-type) ;
|
||||
}
|
||||
}
|
||||
if $(sources)
|
||||
{
|
||||
rmtemp-sources $(target) : $(sources) ;
|
||||
for local source in $(sources)
|
||||
{
|
||||
rmtemp-all-sources $(source) ;
|
||||
}
|
||||
targets-to-remove += [ $(t).actual-name ] ;
|
||||
}
|
||||
}
|
||||
rmtemp-sources $(target) : $(targets-to-remove) ;
|
||||
}
|
||||
|
||||
local rule rmtemp-sources ( target : sources * )
|
||||
|
||||
@@ -5,4 +5,4 @@ accompanying file LICENSE_1_0.txt or copy at
|
||||
http://www.boost.org/LICENSE_1_0.txt)
|
||||
|#
|
||||
|
||||
type MANPAGE : man 0 1 1M 2 3 4 5 6 7 8 9 n p x ;
|
||||
type MANPAGE : man 1M n p x ;
|
||||
|
||||
@@ -6,14 +6,17 @@
|
||||
# This module defines rules to apply an XSLT stylesheet to an XML file using the
|
||||
# xsltproc driver, part of libxslt.
|
||||
|
||||
import "class" : new ;
|
||||
import common ;
|
||||
import feature ;
|
||||
import generators ;
|
||||
import modules ;
|
||||
import os ;
|
||||
import path ;
|
||||
import regex ;
|
||||
import sequence ;
|
||||
|
||||
import toolset ;
|
||||
import virtual-target ;
|
||||
|
||||
feature.feature xsl:param : : free ;
|
||||
feature.feature xsl:path : : free ;
|
||||
@@ -108,12 +111,54 @@ rule .is-cygwin ( xsltproc )
|
||||
}
|
||||
}
|
||||
|
||||
class xsltproc-action : action
|
||||
{
|
||||
rule adjust-properties ( property-set )
|
||||
{
|
||||
local s = [ $(self.targets[1]).creating-subvariant ] ;
|
||||
if $(s)
|
||||
{
|
||||
return [ $(property-set).add-raw
|
||||
[ $(s).implicit-includes "xsl:path" : XML ] ] ;
|
||||
}
|
||||
else
|
||||
{
|
||||
return $(property-set) ;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class xsltproc-generator : generator
|
||||
{
|
||||
rule action-class ( )
|
||||
{
|
||||
return xsltproc-action ;
|
||||
}
|
||||
}
|
||||
|
||||
rule register-generator ( id : source-types + : target-types + : requirements * )
|
||||
{
|
||||
if ! $(id) in $(.known-rules)
|
||||
{
|
||||
.known-rules += $(id) ;
|
||||
flags $(id) ;
|
||||
}
|
||||
generators.register [ new xsltproc-generator $(id) :
|
||||
$(source-types) : $(target-types) : $(requirements) ] ;
|
||||
}
|
||||
|
||||
IMPORT xsltproc : register-generator : : generators.register-xslt ;
|
||||
|
||||
rule flags ( rulename )
|
||||
{
|
||||
toolset.uses-features $(rulename) : <xsl:param> <catalog> : unchecked ;
|
||||
toolset.flags $(rulename) XSL-PATH : <xsl:path> : unchecked ;
|
||||
toolset.flags $(rulename) FLAGS : <flags> : unchecked ;
|
||||
}
|
||||
|
||||
rule compute-xslt-flags ( target : properties * )
|
||||
{
|
||||
# Raw flags.
|
||||
local flags = [ feature.get-values <flags> : $(properties) ] ;
|
||||
|
||||
local flags ;
|
||||
# Translate <xsl:param> into command line flags.
|
||||
for local param in [ feature.get-values <xsl:param> : $(properties) ]
|
||||
{
|
||||
@@ -121,31 +166,6 @@ rule compute-xslt-flags ( target : properties * )
|
||||
flags += --stringparam $(namevalue[1]) \"$(namevalue[2])\" ;
|
||||
}
|
||||
|
||||
# Translate <xsl:path>.
|
||||
for local path in [ feature.get-values <xsl:path> : $(properties) ]
|
||||
{
|
||||
flags += --path \"$(path:G=)\" ;
|
||||
}
|
||||
|
||||
# Take care of implicit dependencies.
|
||||
local other-deps ;
|
||||
for local dep in [ feature.get-values <implicit-dependency> : $(properties)
|
||||
]
|
||||
{
|
||||
other-deps += [ $(dep:G=).creating-subvariant ] ;
|
||||
}
|
||||
|
||||
local implicit-target-directories ;
|
||||
for local dep in [ sequence.unique $(other-deps) ]
|
||||
{
|
||||
implicit-target-directories += [ $(dep).all-target-directories ] ;
|
||||
}
|
||||
|
||||
for local dir in $(implicit-target-directories)
|
||||
{
|
||||
flags += --path \"$(dir:T)\" ;
|
||||
}
|
||||
|
||||
return $(flags) ;
|
||||
}
|
||||
|
||||
@@ -186,25 +206,27 @@ rule xslt-dir ( target : source stylesheet : properties * : dirname )
|
||||
$(dirname) : xslt-xsltproc-dir ] ;
|
||||
}
|
||||
|
||||
_ = " " ;
|
||||
|
||||
actions xslt-xsltproc.windows
|
||||
{
|
||||
$(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)"
|
||||
$(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:W)" --xinclude -o "$(<)" "$(STYLESHEET:W)" "$(>:W)"
|
||||
}
|
||||
|
||||
|
||||
actions xslt-xsltproc bind STYLESHEET
|
||||
{
|
||||
$(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)"
|
||||
$(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:T)" --xinclude -o "$(<)" "$(STYLESHEET:T)" "$(>:T)"
|
||||
}
|
||||
|
||||
|
||||
actions xslt-xsltproc-dir.windows bind STYLESHEET
|
||||
{
|
||||
$(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)"
|
||||
$(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:W)" --xinclude -o "$(<:D)/" "$(STYLESHEET:W)" "$(>:W)"
|
||||
}
|
||||
|
||||
|
||||
actions xslt-xsltproc-dir bind STYLESHEET
|
||||
{
|
||||
$(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)"
|
||||
$(CATALOG) "$(NAME:E=xsltproc)" $(FLAGS) --path$(_)"$(XSL-PATH:T)" --xinclude -o "$(<:D)/" "$(STYLESHEET:T)" "$(>:T)"
|
||||
}
|
||||
|
||||
@@ -14,11 +14,11 @@
|
||||
import project ;
|
||||
import ac ;
|
||||
import errors ;
|
||||
import feature ;
|
||||
import "class" : new ;
|
||||
import targets ;
|
||||
import path ;
|
||||
import modules ;
|
||||
import errors ;
|
||||
import indirect ;
|
||||
import property ;
|
||||
import property-set ;
|
||||
@@ -111,18 +111,12 @@ rule init (
|
||||
project zlib ;
|
||||
}
|
||||
|
||||
local library-path = [ property.select <search> : $(options) ] ;
|
||||
library-path = $(library-path:G=) ;
|
||||
local include-path = [ property.select <include> : $(options) ] ;
|
||||
include-path = $(include-path:G=) ;
|
||||
local source-path = [ property.select <source> : $(options) ] ;
|
||||
source-path = $(source-path:G=) ;
|
||||
local library-name = [ property.select <name> : $(options) ] ;
|
||||
library-name = $(library-name:G=) ;
|
||||
local tag = [ property.select <tag> : $(options) ] ;
|
||||
tag = $(tag:G=) ;
|
||||
local build-name = [ property.select <build-name> : $(options) ] ;
|
||||
build-name = $(build-name:G=) ;
|
||||
local library-path = [ feature.get-values <search> : $(options) ] ;
|
||||
local include-path = [ feature.get-values <include> : $(options) ] ;
|
||||
local source-path = [ feature.get-values <source> : $(options) ] ;
|
||||
local library-name = [ feature.get-values <name> : $(options) ] ;
|
||||
local tag = [ feature.get-values <tag> : $(options) ] ;
|
||||
local build-name = [ feature.get-values <build-name> : $(options) ] ;
|
||||
|
||||
if ! $(library-path) && ! $(include-path) && ! $(source-path) && ! $(library-name)
|
||||
{
|
||||
@@ -132,7 +126,6 @@ rule init (
|
||||
condition = [ property-set.create $(requirements) ] ;
|
||||
condition = [ property-set.create [ $(condition).base ] ] ;
|
||||
|
||||
local no-build-from-source ;
|
||||
# Ignore environmental ZLIB_SOURCE if this initialization
|
||||
# requested to search for a specific pre-built library.
|
||||
if $(library-path) || $(include-path) || $(library-name)
|
||||
@@ -169,7 +162,7 @@ rule init (
|
||||
build-name ?= z ;
|
||||
library-id = [ CALC $(library-id) + 1 ] ;
|
||||
tag = [ MATCH ^@?(.*)$ : $(tag) ] ;
|
||||
if $(tag) && ! [ MATCH ^([^%]*)%([^%]+)$ : $(tag) ]
|
||||
if $(tag)
|
||||
{
|
||||
tag = [ indirect.make $(tag) : [ $(caller).project-module ] ] ;
|
||||
}
|
||||
|
||||
@@ -31,9 +31,16 @@ local rule indirect-rule ( x )
|
||||
#
|
||||
rule make ( rulename bound-args * : context ? )
|
||||
{
|
||||
context ?= [ CALLER_MODULE ] ;
|
||||
context ?= "" ;
|
||||
return $(context)%$(rulename) $(bound-args) ;
|
||||
if [ MATCH $(.pattern) : $(rulename) ]
|
||||
{
|
||||
return $(rulename) $(bound-args) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
context ?= [ CALLER_MODULE ] ;
|
||||
context ?= "" ;
|
||||
return $(context)%$(rulename) $(bound-args) ;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -156,6 +156,42 @@ rule merge ( s1 * : s2 * : ordered * )
|
||||
return $(result__) ;
|
||||
}
|
||||
|
||||
# Compares two sequences lexicographically.
|
||||
#
|
||||
rule compare ( s1 * : s2 * : ordered * )
|
||||
{
|
||||
if ! $(ordered)
|
||||
{
|
||||
if $(s1) < $(s2)
|
||||
{
|
||||
return true ;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
while true
|
||||
{
|
||||
if ! $(s2[1])-is-defined
|
||||
{
|
||||
return ;
|
||||
}
|
||||
else if ! $(s1[1])-is-defined
|
||||
{
|
||||
return true ;
|
||||
}
|
||||
else if [ $(ordered) $(s1[1]) $(s2[1]) ]
|
||||
{
|
||||
return true ;
|
||||
}
|
||||
else if [ $(ordered) $(s2[1]) $(s1[1]) ]
|
||||
{
|
||||
return ;
|
||||
}
|
||||
s1 = $(s1[2-]) ;
|
||||
s2 = $(s2[2-]) ;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Join the elements of s into one long string. If joint is supplied, it is used
|
||||
# as a separator.
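For clarity, the ordered branch of the new compare rule above behaves like the following Python sketch (an illustration only, not part of the change):

    def compare(s1, s2, ordered):
        # True when s1 sorts strictly before s2 under the given ordering.
        for a, b in zip(s1, s2):
            if ordered(a, b):
                return True
            if ordered(b, a):
                return False
        # All compared elements tie: the shorter sequence sorts first.
        return len(s1) < len(s2)

    # e.g. compare([1, 2, 0], [1, 10], lambda a, b: a < b) is True,
    # because the element-wise comparison stops at 2 < 10.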
|
||||
|
||||
@@ -92,13 +92,19 @@ def get_toolset():
|
||||
cygwin = hasattr(os, "uname") and os.uname()[0].lower().startswith("cygwin")
|
||||
windows = cygwin or os.environ.get("OS", "").lower().startswith("windows")
|
||||
|
||||
if cygwin:
|
||||
default_os = "cygwin"
|
||||
elif windows:
|
||||
default_os = "windows"
|
||||
elif hasattr(os, "uname"):
|
||||
default_os = os.uname()[0].lower()
|
||||
|
||||
def prepare_prefixes_and_suffixes(toolset):
|
||||
prepare_suffix_map(toolset)
|
||||
prepare_library_prefix(toolset)
|
||||
def prepare_prefixes_and_suffixes(toolset, target_os=default_os):
|
||||
prepare_suffix_map(toolset, target_os)
|
||||
prepare_library_prefix(toolset, target_os)
|
||||
|
||||
|
||||
def prepare_suffix_map(toolset):
|
||||
def prepare_suffix_map(toolset, target_os=default_os):
|
||||
"""
|
||||
Set up suffix translation performed by the Boost Build testing framework
|
||||
to accommodate different toolsets generating targets of the same type using
|
||||
@@ -107,11 +113,11 @@ def prepare_suffix_map(toolset):
|
||||
"""
|
||||
global suffixes
|
||||
suffixes = {}
|
||||
if windows:
|
||||
if target_os in ["windows", "cygwin"]:
|
||||
if toolset == "gcc":
|
||||
suffixes[".lib"] = ".a" # mingw static libs use suffix ".a".
|
||||
suffixes[".obj"] = ".o"
|
||||
if cygwin:
|
||||
if target_os == "cygwin":
|
||||
suffixes[".implib"] = ".lib.a"
|
||||
else:
|
||||
suffixes[".implib"] = ".lib"
|
||||
@@ -122,11 +128,11 @@ def prepare_suffix_map(toolset):
|
||||
suffixes[".obj"] = ".o"
|
||||
suffixes[".implib"] = ".no_implib_files_on_this_platform"
|
||||
|
||||
if hasattr(os, "uname") and os.uname()[0] == "Darwin":
|
||||
if target_os == "darwin":
|
||||
suffixes[".dll"] = ".dylib"
|
||||
|
||||
|
||||
def prepare_library_prefix(toolset):
|
||||
def prepare_library_prefix(toolset, target_os=default_os):
|
||||
"""
|
||||
Set up whether Boost Build is expected to automatically prepend prefixes
|
||||
to its built library targets.
|
||||
@@ -136,9 +142,9 @@ def prepare_library_prefix(toolset):
|
||||
lib_prefix = "lib"
|
||||
|
||||
global dll_prefix
|
||||
if cygwin:
|
||||
if target_os == "cygwin":
|
||||
dll_prefix = "cyg"
|
||||
elif windows and toolset != "gcc":
|
||||
elif target_os == "windows" and toolset != "gcc":
|
||||
dll_prefix = None
|
||||
else:
|
||||
dll_prefix = "lib"
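As a quick orientation for the new target_os parameter, the expected results follow directly from the branches above (module name as used by the test scripts; the calls only set the module-level globals):

    import BoostBuild
    BoostBuild.prepare_library_prefix("msvc", target_os="windows")  # dll_prefix becomes None
    BoostBuild.prepare_library_prefix("gcc", target_os="cygwin")    # dll_prefix becomes "cyg"
    BoostBuild.prepare_library_prefix("gcc", target_os="linux")     # dll_prefix becomes "lib"
    BoostBuild.prepare_suffix_map("gcc", target_os="windows")       # suffixes[".obj"] becomes ".o"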
|
||||
@@ -209,11 +215,11 @@ class Tester(TestCmd.TestCmd):
|
||||
def __init__(self, arguments=None, executable="bjam",
|
||||
match=TestCmd.match_exact, boost_build_path=None,
|
||||
translate_suffixes=True, pass_toolset=True, use_test_config=True,
|
||||
ignore_toolset_requirements=False, workdir="", pass_d0=True,
|
||||
ignore_toolset_requirements=False, workdir="", pass_d0=False,
|
||||
**keywords):
|
||||
|
||||
assert arguments.__class__ is not str
|
||||
self.original_workdir = os.getcwd()
|
||||
self.original_workdir = os.path.dirname(__file__)
|
||||
if workdir and not os.path.isabs(workdir):
|
||||
raise ("Parameter workdir <%s> must point to an absolute "
|
||||
"directory: " % workdir)
|
||||
@@ -274,8 +280,9 @@ class Tester(TestCmd.TestCmd):
|
||||
|
||||
# Find where jam_src is located. Try for the debug version if it is
|
||||
# lying around.
|
||||
dirs = [os.path.join("..", "src", "engine", jam_build_dir + ".debug"),
|
||||
os.path.join("..", "src", "engine", jam_build_dir)]
|
||||
srcdir = os.path.join(os.path.dirname(__file__), "..", "src")
|
||||
dirs = [os.path.join(srcdir, "engine", jam_build_dir + ".debug"),
|
||||
os.path.join(srcdir, "engine", jam_build_dir)]
|
||||
for d in dirs:
|
||||
if os.path.exists(d):
|
||||
jam_build_dir = d
|
||||
@@ -289,7 +296,8 @@ class Tester(TestCmd.TestCmd):
|
||||
verbosity = []
|
||||
if "--verbose" in sys.argv:
|
||||
keywords["verbose"] = True
|
||||
verbosity = ["-d+2"]
|
||||
verbosity = ["-d2"]
|
||||
self.verbosity = verbosity
|
||||
|
||||
if boost_build_path is None:
|
||||
boost_build_path = self.original_workdir + "/.."
|
||||
@@ -300,8 +308,6 @@ class Tester(TestCmd.TestCmd):
|
||||
else:
|
||||
program_list.append(os.path.join(jam_build_dir, executable))
|
||||
program_list.append('-sBOOST_BUILD_PATH="' + boost_build_path + '"')
|
||||
if verbosity:
|
||||
program_list += verbosity
|
||||
if arguments:
|
||||
program_list += arguments
|
||||
|
||||
@@ -320,6 +326,12 @@ class Tester(TestCmd.TestCmd):
|
||||
# this case.
|
||||
pass
|
||||
|
||||
def set_toolset(self, toolset, target_os=default_os):
|
||||
self.toolset = toolset
|
||||
self.pass_toolset = True
|
||||
prepare_prefixes_and_suffixes(toolset, target_os)
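A test script would typically drive this as follows (the toolset and OS names here are purely illustrative):

    import BoostBuild
    t = BoostBuild.Tester()
    t.set_toolset("gcc-7", "linux")  # also switches the expected library prefixes/suffixes
    # ... run_build_system / expect_addition calls as usual ...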
|
||||
|
||||
|
||||
#
|
||||
# Methods that change the working directory's content.
|
||||
#
|
||||
@@ -426,6 +438,7 @@ class Tester(TestCmd.TestCmd):
|
||||
return
|
||||
|
||||
self.previous_tree, dummy = tree.build_tree(self.workdir)
|
||||
self.wait_for_time_change_since_last_build()
|
||||
|
||||
if match is None:
|
||||
match = self.match
|
||||
@@ -444,6 +457,8 @@ class Tester(TestCmd.TestCmd):
|
||||
kw["program"] += self.program
|
||||
if extra_args:
|
||||
kw["program"] += extra_args
|
||||
if stdout is None and not any(a.startswith("-d") for a in kw["program"]):
|
||||
kw["program"] += self.verbosity
|
||||
if pass_toolset:
|
||||
kw["program"].append("toolset=" + self.toolset)
|
||||
if use_test_config:
|
||||
@@ -707,6 +722,7 @@ class Tester(TestCmd.TestCmd):
|
||||
self.ignore("*.rsp") # Response files.
|
||||
self.ignore("*.tds") # Borland debug symbols.
|
||||
self.ignore("*.manifest") # MSVC DLL manifests.
|
||||
self.ignore("bin/standalone/msvc/*/msvc-setup.bat")
|
||||
|
||||
# Debug builds of bjam built with gcc produce this profiling data.
|
||||
self.ignore("gmon.out")
|
||||
@@ -757,8 +773,8 @@ class Tester(TestCmd.TestCmd):
|
||||
matched = reduce(
|
||||
lambda x, y: x and reduce(
|
||||
lambda a, b: a and b,
|
||||
y),
|
||||
matched)
|
||||
y, True),
|
||||
matched, True)
|
||||
|
||||
if not matched:
|
||||
print "Expected:\n"
|
||||
@@ -856,6 +872,22 @@ class Tester(TestCmd.TestCmd):
|
||||
"""
|
||||
self.__wait_for_time_change(path, touch, last_build_time=False)
|
||||
|
||||
def wait_for_time_change_since_last_build(self):
|
||||
"""
|
||||
Wait for newly assigned file system modification timestamps to
|
||||
become large enough for the timestamp difference to be
|
||||
correctly recognized by the Python based testing framework.
|
||||
Does not care about Jam's timestamp resolution, since we
|
||||
only need this to detect touched files.
|
||||
"""
|
||||
if self.last_build_timestamp:
|
||||
timestamp_file = "timestamp-3df2f2317e15e4a9"
|
||||
open(timestamp_file, "wb").close()
|
||||
self.__wait_for_time_change_impl(timestamp_file,
|
||||
self.last_build_timestamp,
|
||||
self.__python_timestamp_resolution(timestamp_file, 0), 0)
|
||||
os.unlink(timestamp_file)
|
||||
|
||||
def __build_timestamp_resolution(self):
|
||||
"""
|
||||
Returns the minimum path modification timestamp resolution supported
|
||||
@@ -1103,7 +1135,12 @@ class Tester(TestCmd.TestCmd):
|
||||
|
||||
resolution = self.__python_timestamp_resolution(path, build_resolution)
|
||||
assert resolution >= build_resolution
|
||||
self.__wait_for_time_change_impl(path, start_time, resolution, build_resolution)
|
||||
|
||||
if not touch:
|
||||
os.utime(path, (stats_orig.st_atime, stats_orig.st_mtime))
|
||||
|
||||
def __wait_for_time_change_impl(self, path, start_time, resolution, build_resolution):
|
||||
# Implementation notes:
|
||||
# * Theoretically time.sleep() API might get interrupted too soon
|
||||
# (never actually encountered).
|
||||
@@ -1160,9 +1197,6 @@ class Tester(TestCmd.TestCmd):
|
||||
break
|
||||
_sleep(max(0.01, start_time - c))
|
||||
|
||||
if not touch:
|
||||
os.utime(path, (stats_orig.st_atime, stats_orig.st_mtime))
|
||||
|
||||
|
||||
class List:
|
||||
def __init__(self, s=""):
|
||||
|
||||
29
test/Jamfile.jam
Normal file
29
test/Jamfile.jam
Normal file
@@ -0,0 +1,29 @@
|
||||
# Copyright 2018 Steven Watanabe
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
import python ;
|
||||
import testing ;
|
||||
|
||||
if ! [ python.configured ]
|
||||
{
|
||||
using python ;
|
||||
}
|
||||
|
||||
# Not quite perfect, but good enough for most purposes
|
||||
local test-files = [ glob *.py ] ;
|
||||
|
||||
local boost-build-files = [ glob
|
||||
../src/tools/*.jam
|
||||
../src/tools/*/*.jam
|
||||
../src/build/*.jam
|
||||
../src/util/*.jam
|
||||
../src/kernel/*.jam
|
||||
../src/options/*.jam
|
||||
../src/*.jam ] ;
|
||||
|
||||
testing.make-test run-pyd : test_all.py :
|
||||
<dependency>$(test-files)
|
||||
<dependency>$(boost-build-files)
|
||||
;
|
||||
@@ -18,6 +18,7 @@ parser.add_option('-o', dest="output_file")
|
||||
parser.add_option('-x', dest="language")
|
||||
parser.add_option('-c', dest="compile", action="store_true")
|
||||
parser.add_option('-I', dest="includes", action="append")
|
||||
parser.add_option('-D', dest="defines", action="append")
|
||||
parser.add_option('-L', dest="library_path", action="append")
|
||||
parser.add_option('--dll', dest="dll", action="store_true")
|
||||
parser.add_option('--archive', dest="archive", action="store_true")
|
||||
@@ -101,6 +102,16 @@ class MockInfo(object):
|
||||
" != ", map(adjust_path, expected_options.includes)
|
||||
return False
|
||||
|
||||
if options.defines is None:
|
||||
options.defines = []
|
||||
if expected_options.defines is None:
|
||||
expected_options.defines = []
|
||||
if options.defines != expected_options.defines:
|
||||
if self.verbose:
|
||||
print " Failed to match -I ", options.defines, \
|
||||
" != ", expected_options.defines
|
||||
return False
|
||||
|
||||
if options.library_path is None:
|
||||
options.library_path = []
|
||||
if expected_options.library_path is None:
|
||||
@@ -203,16 +214,18 @@ generators.register-linker mock.link : LIB OBJ : EXE : <toolset>mock ;
|
||||
generators.register-linker mock.link.dll : LIB OBJ : SHARED_LIB : <toolset>mock ;
|
||||
generators.register-archiver mock.archive : OBJ : STATIC_LIB : <toolset>mock ;
|
||||
|
||||
toolset.flags mock.compile INCLUDES <include> ;
|
||||
toolset.flags mock.compile OPTIONS <link>shared : -fPIC ;
|
||||
toolset.flags mock.compile INCLUDES : <include> ;
|
||||
toolset.flags mock.compile DEFINES : <define> ;
|
||||
|
||||
actions compile.c
|
||||
{
|
||||
$(.config-cmd) mock.py -c -x c -I"$(INCLUDES)" "$(>)" -o "$(<)"
|
||||
$(.config-cmd) mock.py -c -x c -I"$(INCLUDES)" -D"$(DEFINES)" "$(>)" -o "$(<)"
|
||||
}
|
||||
|
||||
actions compile.c++
|
||||
{
|
||||
$(.config-cmd) mock.py -c -x c++ -I"$(INCLUDES)" "$(>)" -o "$(<)"
|
||||
$(.config-cmd) mock.py -c -x c++ -I"$(INCLUDES)" -D"$(DEFINES)" "$(>)" -o "$(<)"
|
||||
}
|
||||
|
||||
toolset.flags mock.link USER_OPTIONS <linkflags> ;
|
||||
|
||||
113
test/TestToolset.py
Normal file
113
test/TestToolset.py
Normal file
@@ -0,0 +1,113 @@
|
||||
#!/usr/bin/python
|
||||
#
|
||||
# Copyright 2017 Steven Watanabe
|
||||
#
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# validates a toolset using a mock of the compiler
|
||||
|
||||
import BoostBuild
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
renames = {"debug": "variant=debug", "release": "variant=release"}
|
||||
|
||||
def set_default_target_os(os):
|
||||
global removed
|
||||
global default_target_os
|
||||
default_target_os = os
|
||||
removed = set()
|
||||
removed.add("target-os=" + default_target_os)
|
||||
|
||||
def adjust_property(property):
|
||||
global renames
|
||||
if property in renames:
|
||||
return renames[property]
|
||||
else:
|
||||
return property
|
||||
|
||||
def adjust_properties(properties):
|
||||
global removed
|
||||
return [adjust_property(p) for p in properties if p not in removed]
|
||||
|
||||
def has_property(name, properties):
|
||||
return name in [re.sub("=.*", "", p) for p in properties]
|
||||
|
||||
def get_property(name, properties):
|
||||
for m in [re.match("(.*)=(.*)", p) for p in properties]:
|
||||
if m and m.group(1) == name:
|
||||
return m.group(2)
|
||||
|
||||
def get_target_os(properties):
|
||||
return get_property("target-os", properties) or default_target_os
|
||||
|
||||
def expand_properties(properties):
|
||||
result = properties[:]
|
||||
if not has_property("variant", properties):
|
||||
result += ["variant=debug"]
|
||||
if not has_property("threading", properties):
|
||||
result += ["threading=single"]
|
||||
if not has_property("link", properties):
|
||||
result += ["link=shared"]
|
||||
if not has_property("runtime-link", properties):
|
||||
result += ["runtime-link=shared"]
|
||||
if not has_property("strip", properties):
|
||||
result += ["strip=off"]
|
||||
if not has_property("target-os", properties):
|
||||
result += ["target-os=" + default_target_os]
|
||||
return result
|
||||
|
||||
def compute_path(properties, target_type):
|
||||
path = ""
|
||||
if "variant=release" in properties:
|
||||
path += "/release"
|
||||
else:
|
||||
path += "/debug"
|
||||
if has_property("address-model", properties):
|
||||
path += "/address-model-" + get_property("address-model", properties)
|
||||
if has_property("architecture", properties):
|
||||
path += "/architecture-" + get_property("architecture", properties)
|
||||
if "cxxstd=latest" in properties:
|
||||
path += "/cxxstd-latest-iso"
|
||||
if "link=static" in properties:
|
||||
path += "/link-static"
|
||||
if "runtime-link=static" in properties and target_type in ["exe"]:
|
||||
path += "/runtime-link-static"
|
||||
if "strip=on" in properties and target_type in ["dll", "exe", "obj2"]:
|
||||
path += "/strip-on"
|
||||
if get_target_os(properties) != default_target_os:
|
||||
path += "/target-os-" + get_target_os(properties)
|
||||
if "threading=multi" in properties:
|
||||
path += "/threading-multi"
|
||||
return path
|
||||
|
||||
def test_toolset(toolset, version, property_sets):
|
||||
t = BoostBuild.Tester()
|
||||
|
||||
t.set_tree("toolset-mock")
|
||||
|
||||
# Build necessary tools
|
||||
t.run_build_system(["-sPYTHON_CMD=%s" % sys.executable], subdir="src")
|
||||
set_default_target_os(t.read("src/bin/target-os.txt").strip())
|
||||
|
||||
for properties in property_sets:
|
||||
t.set_toolset(toolset + "-" + version, get_target_os(properties))
|
||||
properties = adjust_properties(properties)
|
||||
def path(t):
|
||||
return toolset.split("-")[0] + "-*" + version + compute_path(properties, t)
|
||||
os.environ["B2_PROPERTIES"] = " ".join(expand_properties(properties))
|
||||
t.run_build_system(["--user-config="] + properties)
|
||||
t.expect_addition("bin/%s/lib.obj" % (path("obj")))
|
||||
if "link=static" not in properties:
|
||||
t.expect_addition("bin/%s/l1.dll" % (path("dll")))
|
||||
else:
|
||||
t.expect_addition("bin/%s/l1.lib" % (path("lib")))
|
||||
t.expect_addition("bin/%s/main.obj" % (path("obj2")))
|
||||
t.expect_addition("bin/%s/test.exe" % (path("exe")))
|
||||
t.expect_nothing_more()
|
||||
t.rm("bin")
|
||||
|
||||
t.cleanup()
|
||||
@@ -99,9 +99,7 @@ int main() {}
|
||||
#
|
||||
###############################################################################
|
||||
|
||||
# We do not pass the '-d0' option to Boost Build here to get more detailed
|
||||
# information in case of failure.
|
||||
t = BoostBuild.Tester(pass_d0=False, use_test_config=False)
|
||||
t = BoostBuild.Tester(use_test_config=False)
|
||||
|
||||
test_alias_rule(t)
|
||||
test_alias_source_usage_requirements(t)
|
||||
|
||||
@@ -88,7 +88,7 @@ exe a : a_empty.cpp ;
|
||||
exe a : a.cpp ;
|
||||
""")
|
||||
t.run_build_system(["--no-error-backtrace"], status=None)
|
||||
t.fail_test(string.find(t.stdout(), "No best alternative") == -1)
|
||||
t.expect_output_lines("error: No best alternative for ./a")
|
||||
|
||||
# Another ambiguity test: two matches properties in one alternative are neither
|
||||
# better nor worse than a single one in another alternative.
|
||||
@@ -98,7 +98,8 @@ exe a : a.cpp : <debug-symbols>on ;
|
||||
""")
|
||||
|
||||
t.run_build_system(["--no-error-backtrace"], status=None)
|
||||
t.fail_test(string.find(t.stdout(), "No best alternative") == -1)
|
||||
t.expect_output_lines("error: No best alternative for ./a")
|
||||
t.rm("bin")
|
||||
|
||||
# Test that we can have alternative without sources.
|
||||
t.write("jamfile.jam", """\
|
||||
@@ -108,7 +109,21 @@ feature.extend os : MAGIC ;
|
||||
alias specific-sources : b.cpp : <os>MAGIC ;
|
||||
exe a : a.cpp specific-sources ;
|
||||
""")
|
||||
t.rm("bin")
|
||||
t.run_build_system()
|
||||
t.expect_addition("bin/$toolset/debug*/a.exe")
|
||||
t.rm("bin")
|
||||
|
||||
# Test that subfeatures are expanded in alternatives
|
||||
# and that unknown subfeatures fail to match instead of
|
||||
# causing errors.
|
||||
t.write("jamfile.jam", """\
|
||||
import feature : feature subfeature ;
|
||||
feature X : off on : propagated ;
|
||||
subfeature X on : version : 1 : propagated ;
|
||||
exe a : a.cpp : <X>on-1 ;
|
||||
exe a : a_empty.cpp ;
|
||||
exe a : a_empty.cpp : <X>on-2 ;
|
||||
""")
|
||||
t.run_build_system(["X=on-1"])
|
||||
|
||||
t.cleanup()
|
||||
|
||||
166
test/command_line_properties.py
Normal file
@@ -0,0 +1,166 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
import BoostBuild
|
||||
|
||||
def test_basic():
|
||||
'''Tests that feature=value works'''
|
||||
t = BoostBuild.Tester()
|
||||
t.write('Jamroot.jam', '''
|
||||
import feature : feature ;
|
||||
import toolset : flags ;
|
||||
feature f1 : 1 2 ;
|
||||
make output.txt : : @run ;
|
||||
flags run OPTIONS <f1> ;
|
||||
actions run { echo $(OPTIONS) > $(<) }
|
||||
''')
|
||||
t.run_build_system(['f1=2'])
|
||||
t.expect_content("bin/*/output.txt", "2")
|
||||
t.cleanup()
|
||||
|
||||
def test_implicit():
|
||||
'''Tests that implicit feature values can be given on the command line without naming the feature'''
|
||||
t = BoostBuild.Tester()
|
||||
t.write('Jamroot.jam', '''
|
||||
import feature : feature ;
|
||||
import toolset : flags ;
|
||||
feature f1 : v1 v2 : implicit ;
|
||||
make output.txt : : @run ;
|
||||
flags run OPTIONS <f1> ;
|
||||
actions run { echo $(OPTIONS) > $(<) }
|
||||
''')
|
||||
t.run_build_system(['v2'])
|
||||
t.expect_content("bin/*/output.txt", "v2")
|
||||
t.cleanup()
|
||||
|
||||
def test_optional():
|
||||
'''Tests that feature= works for optional features'''
|
||||
t = BoostBuild.Tester()
|
||||
t.write('Jamroot.jam', '''
|
||||
import feature : feature ;
|
||||
import toolset : flags ;
|
||||
feature f1 : 1 2 : optional ;
|
||||
make output.txt : : @run ;
|
||||
flags run OPTIONS <f1> ;
|
||||
actions run { echo b $(OPTIONS) > $(<) }
|
||||
''')
|
||||
t.run_build_system(['f1='])
|
||||
t.expect_content("bin/*/output.txt", "b")
|
||||
t.cleanup()
|
||||
|
||||
def test_free():
|
||||
'''Free features named on the command line apply to all targets
|
||||
everywhere. Free features can contain any characters, even those
|
||||
that have a special meaning.'''
|
||||
t = BoostBuild.Tester()
|
||||
t.write('Jamroot.jam', '''
|
||||
import feature : feature ;
|
||||
import toolset : flags ;
|
||||
feature f1 : : free ;
|
||||
make output1.txt : : @run : <dependency>output2.txt ;
|
||||
make output2.txt : : @run ;
|
||||
explicit output2.txt ;
|
||||
flags run OPTIONS <f1> ;
|
||||
actions run { echo $(OPTIONS) > $(<) }
|
||||
''')
|
||||
t.run_build_system(['f1=x,/:-'])
|
||||
t.expect_content("bin*/output1.txt", "x,/:-")
|
||||
t.expect_content("bin*/output2.txt", "x,/:-")
|
||||
t.cleanup()
|
||||
|
||||
def test_subfeature():
|
||||
'''Subfeatures should be expressed as feature=value-subvalue'''
|
||||
t = BoostBuild.Tester()
|
||||
t.write('Jamroot.jam', '''
|
||||
import feature : feature subfeature ;
|
||||
import toolset : flags ;
|
||||
feature f1 : 1 2 ;
|
||||
subfeature f1 2 : sub : x y ;
|
||||
make output.txt : : @run ;
|
||||
flags run OPTIONS <f1-2:sub> ;
|
||||
actions run { echo $(OPTIONS) > $(<) }
|
||||
''')
|
||||
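# 'f1=2-y' selects f1=2 with the subfeature sub=y, so OPTIONS receives only the subfeature value.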
t.run_build_system(['f1=2-y'])
|
||||
t.expect_content("bin/*/output.txt", "y")
|
||||
t.cleanup()
|
||||
|
||||
def test_multiple_values():
|
||||
'''Multiple values of a feature can be given in a comma-separated list'''
|
||||
t = BoostBuild.Tester()
|
||||
t.write('Jamroot.jam', '''
|
||||
import feature : feature ;
|
||||
import toolset : flags ;
|
||||
feature f1 : 1 2 3 ;
|
||||
make output.txt : : @run ;
|
||||
flags run OPTIONS <f1> ;
|
||||
actions run { echo $(OPTIONS) > $(<) }
|
||||
''')
|
||||
t.run_build_system(['f1=2,3'])
|
||||
t.expect_content("bin*/f1-2*/output.txt", "2")
|
||||
t.expect_content("bin*/f1-3*/output.txt", "3")
|
||||
t.cleanup()
|
||||
|
||||
def test_multiple_properties():
|
||||
'''Multiple properties can be grouped with /'''
|
||||
t = BoostBuild.Tester()
|
||||
t.write('Jamroot.jam', '''
|
||||
import feature : feature ;
|
||||
import toolset : flags ;
|
||||
feature f1 : 1 2 ;
|
||||
feature f2 : 3 4 ;
|
||||
make output.txt : : @run ;
|
||||
flags run OPTIONS <f1> ;
|
||||
flags run OPTIONS <f2> ;
|
||||
actions run { echo $(OPTIONS) > $(<) }
|
||||
''')
|
||||
t.run_build_system(['f1=2/f2=4'])
|
||||
t.expect_content("bin/*/output.txt", "2 4")
|
||||
t.cleanup()
|
||||
|
||||
def test_cross_product():
|
||||
'''If multiple properties are specified on the command line
|
||||
we expand to every possible maximal set of non-conflicting features.
|
||||
This test should be run after testing individual components in
|
||||
isolation.'''
|
||||
t = BoostBuild.Tester()
|
||||
t.write('Jamroot.jam', '''
|
||||
import feature : feature ;
|
||||
import toolset : flags ;
|
||||
# Make features symmetric to make the paths easier to distinguish
|
||||
feature f1 : 11 12 13 14 15 : symmetric ;
|
||||
feature f2 : 21 22 23 : symmetric ;
|
||||
feature f3 : v1 v2 v3 v4 : implicit symmetric ;
|
||||
feature f4 : : free ;
|
||||
make output.txt : : @run ;
|
||||
flags run OPTIONS <f1> ;
|
||||
flags run OPTIONS <f2> ;
|
||||
flags run OPTIONS <f3> ;
|
||||
flags run OPTIONS <f4> ;
|
||||
actions run { echo $(OPTIONS) > $(<) }
|
||||
''')
|
||||
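# f4 is free, so 'xxx' and 'yyy' apply to every expansion, while 'v4/f1=15/f4=zzz' stays grouped as a single property set.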
t.run_build_system(['f1=12,13/f2=22', 'v2', 'v3', 'f1=14', 'f2=23',
|
||||
'f4=xxx', 'f4=yyy', 'v4/f1=15/f4=zzz'])
|
||||
t.expect_content("bin*/v2*/f1-12/f2-22*/output.txt", "12 22 v2 xxx yyy")
|
||||
t.expect_addition("bin*/v2*/f1-12/f2-22*/output.txt")
|
||||
t.expect_content("bin*/v2*/f1-13/f2-22*/output.txt", "13 22 v2 xxx yyy")
|
||||
t.expect_addition("bin*/v2*/f1-13/f2-22*/output.txt")
|
||||
t.expect_content("bin*/v2*/f1-14/f2-23*/output.txt", "14 23 v2 xxx yyy")
|
||||
t.expect_addition("bin*/v2*/f1-14/f2-23*/output.txt")
|
||||
t.expect_content("bin*/v3*/f1-12/f2-22*/output.txt", "12 22 v3 xxx yyy")
|
||||
t.expect_addition("bin*/v3*/f1-12/f2-22*/output.txt")
|
||||
t.expect_content("bin*/v3*/f1-13/f2-22*/output.txt", "13 22 v3 xxx yyy")
|
||||
t.expect_addition("bin*/v3*/f1-13/f2-22*/output.txt")
|
||||
t.expect_content("bin*/v3*/f1-14/f2-23*/output.txt", "14 23 v3 xxx yyy")
|
||||
t.expect_addition("bin*/v3*/f1-14/f2-23*/output.txt")
|
||||
t.expect_content("bin*/v4*/f1-15/f2-23*/output.txt", "15 23 v4 xxx yyy zzz")
|
||||
t.expect_addition("bin*/v4*/f1-15/f2-23*/output.txt")
|
||||
t.expect_nothing_more()
|
||||
t.cleanup()
|
||||
|
||||
test_basic()
|
||||
test_implicit()
|
||||
test_optional()
|
||||
test_free()
|
||||
test_subfeature()
|
||||
test_multiple_values()
|
||||
test_multiple_properties()
|
||||
test_cross_product()
|
||||
@@ -8,6 +8,7 @@
|
||||
# Test Boost Build configuration file handling.
|
||||
|
||||
import BoostBuild
|
||||
import TestCmd
|
||||
|
||||
import os
|
||||
import os.path
|
||||
@@ -316,6 +317,71 @@ for x in $(names)
|
||||
t.cleanup()
|
||||
|
||||
|
||||
def test_site_config():
|
||||
# Ignore user-config, just in case it depends on the user's site-config.jam
|
||||
t = BoostBuild.Tester(["--user-config="], use_test_config=False,
|
||||
pass_toolset=0)
|
||||
# We can immediately exit after we finish loading the config files
|
||||
t.write("Jamroot", "EXIT Done : 0 ;")
|
||||
t.write("my-site-config.jam", "ECHO Loaded my-site-config ;")
|
||||
|
||||
t.run_build_system(["--site-config=my-site-config.jam"],
|
||||
stdout="Loaded my-site-config\nDone\n")
|
||||
|
||||
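# --ignore-site-config should emit a notice when combined with --debug-configuration.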
t.run_build_system(["--ignore-site-config", "--debug-configuration"])
|
||||
t.expect_output_lines("""\
|
||||
notice: Site configuration files will be ignored due to the
|
||||
notice: --ignore-site-config command-line option.""")
|
||||
|
||||
t.run_build_system(["--site-config=", "--debug-configuration"])
|
||||
t.expect_output_lines("""\
|
||||
notice: Site configuration file loading explicitly disabled.""")
|
||||
|
||||
t.cleanup()
|
||||
|
||||
def test_global_config():
|
||||
t = BoostBuild.Tester(use_test_config=False, pass_toolset=0)
|
||||
t.write("my-config.jam", "ECHO Loading my-config ;")
|
||||
t.write("Jamroot", "EXIT Done : 0 ;")
|
||||
t.write("project-config.jam", "ECHO bad ;")
|
||||
t.run_build_system(["--config=my-config.jam", "--debug-configuration"],
|
||||
match=TestCmd.match_re, stdout=
|
||||
r"""notice: found boost-build\.jam at .*
|
||||
notice: loading Boost\.Build from .*
|
||||
notice: Searching '.*' for all-config configuration file 'my-config\.jam'\.
|
||||
notice: Loading all-config configuration file 'my-config\.jam' from '.*'\.
|
||||
Loading my-config
|
||||
notice: Regular configuration files will be ignored due
|
||||
notice: to the global configuration being loaded\.
|
||||
Done
|
||||
""")
|
||||
t.run_build_system(["--config=", "--debug-configuration"],
|
||||
match=TestCmd.match_re, stdout=
|
||||
r"""notice: found boost-build\.jam at .*
|
||||
notice: loading Boost\.Build from .*
|
||||
notice: Configuration file loading explicitly disabled.
|
||||
Done
|
||||
""")
|
||||
t.cleanup()
|
||||
|
||||
def test_project_config():
|
||||
t = BoostBuild.Tester(["--user-config=", "--site-config="],
|
||||
use_test_config=False, pass_toolset=False)
|
||||
t.write("Jamroot", "EXIT Done : 0 ;")
|
||||
t.write("project-config.jam", "ECHO Loading Root ;")
|
||||
t.write("my-project-config.jam", "ECHO Loading explicit ;")
|
||||
t.write("sub/project-config.jam", "ECHO Loading subdir ;")
|
||||
t.write("sub/Jamfile", "")
|
||||
|
||||
t.run_build_system(stdout="Loading Root\nDone\n")
|
||||
t.run_build_system(subdir="sub", stdout="Loading subdir\nDone\n")
|
||||
t.rm("sub/project-config.jam")
|
||||
t.run_build_system(subdir="sub", stdout="Loading Root\nDone\n")
|
||||
t.run_build_system(["--project-config=my-project-config.jam"],
|
||||
stdout="Loading explicit\nDone\n")
|
||||
|
||||
t.cleanup()
|
||||
|
||||
###############################################################################
|
||||
#
|
||||
# main()
|
||||
@@ -326,3 +392,6 @@ for x in $(names)
|
||||
canSetEmptyEnvironmentVariable = _canSetEmptyEnvironmentVariable()
|
||||
|
||||
test_user_configuration()
|
||||
test_site_config()
|
||||
test_global_config()
|
||||
test_project_config()
|
||||
|
||||
128
test/configure.py
Normal file
@@ -0,0 +1,128 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright 2017 Steven Watanabe
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# Tests configure.check-target-builds and friends
|
||||
|
||||
import BoostBuild
|
||||
|
||||
def test_check_target_builds():
|
||||
t = BoostBuild.Tester(use_test_config=0)
|
||||
t.write("Jamroot", """
|
||||
import configure ;
|
||||
obj pass : pass.cpp ;
|
||||
obj fail : fail.cpp ;
|
||||
explicit pass fail ;
|
||||
obj foo : foo.cpp :
|
||||
[ configure.check-target-builds pass : <define>PASS : <define>FAIL ] ;
|
||||
obj bar : foo.cpp :
|
||||
[ configure.check-target-builds fail : <define>FAIL : <define>PASS ] ;
|
||||
""")
|
||||
t.write("pass.cpp", "void f() {}\n")
|
||||
t.write("fail.cpp", "#error fail.cpp\n")
|
||||
t.write("foo.cpp", """
|
||||
#ifndef PASS
|
||||
#error PASS not defined
|
||||
#endif
|
||||
#ifdef FAIL
|
||||
#error FAIL is defined
|
||||
#endif
|
||||
""")
|
||||
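# check-target-builds compiles the probe target and applies the first property set only if it builds, the second otherwise.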
t.run_build_system()
|
||||
t.expect_output_lines([
|
||||
" - pass builds : yes",
|
||||
" - fail builds : no"])
|
||||
t.expect_addition("bin/$toolset/debug*/pass.obj")
|
||||
t.expect_addition("bin/$toolset/debug*/foo.obj")
|
||||
t.expect_addition("bin/$toolset/debug*/bar.obj")
|
||||
t.expect_nothing_more()
|
||||
|
||||
# An up-to-date build should use the cache
|
||||
t.run_build_system()
|
||||
t.expect_output_lines([
|
||||
" - pass builds : yes (cached)",
|
||||
" - fail builds : no (cached)"])
|
||||
t.expect_nothing_more()
|
||||
|
||||
# -a should re-run everything, including configuration checks
|
||||
t.run_build_system(["-a"])
|
||||
t.expect_output_lines([
|
||||
" - pass builds : yes",
|
||||
" - fail builds : no"])
|
||||
t.expect_touch("bin/$toolset/debug*/pass.obj")
|
||||
t.expect_touch("bin/$toolset/debug*/foo.obj")
|
||||
t.expect_touch("bin/$toolset/debug*/bar.obj")
|
||||
t.expect_nothing_more()
|
||||
|
||||
# --reconfigure should re-run configuration checks only
|
||||
t.run_build_system(["--reconfigure"])
|
||||
t.expect_output_lines([
|
||||
" - pass builds : yes",
|
||||
" - fail builds : no"])
|
||||
t.expect_touch("bin/$toolset/debug*/pass.obj")
|
||||
t.expect_nothing_more()
|
||||
|
||||
# -a -n should not rebuild configuration checks
|
||||
t.run_build_system(["-a", "-n"])
|
||||
t.expect_output_lines([
|
||||
" - pass builds : yes (cached)",
|
||||
" - fail builds : no (cached)"])
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.cleanup()
|
||||
|
||||
def test_choose():
|
||||
t = BoostBuild.Tester(use_test_config=0)
|
||||
t.write("Jamroot", """
|
||||
import configure ;
|
||||
obj pass : pass.cpp ;
|
||||
obj fail : fail.cpp ;
|
||||
explicit pass fail ;
|
||||
obj foo : foo.cpp :
|
||||
[ configure.choose "which one?" : fail <define>FAIL : pass <define>PASS ] ;
|
||||
""")
|
||||
t.write("pass.cpp", "void f() {}\n")
|
||||
t.write("fail.cpp", "#error fail.cpp\n")
|
||||
t.write("foo.cpp", """
|
||||
#ifndef PASS
|
||||
#error PASS not defined
|
||||
#endif
|
||||
#ifdef FAIL
|
||||
#error FAIL is defined
|
||||
#endif
|
||||
""")
|
||||
t.run_build_system()
|
||||
t.expect_output_lines([
|
||||
" - which one? : pass"])
|
||||
t.expect_addition("bin/$toolset/debug*/pass.obj")
|
||||
t.expect_addition("bin/$toolset/debug*/foo.obj")
|
||||
t.expect_nothing_more()
|
||||
|
||||
# An up-to-date build should use the cache
|
||||
t.run_build_system()
|
||||
t.expect_output_lines([
|
||||
" - which one? : pass (cached)"])
|
||||
t.expect_nothing_more()
|
||||
|
||||
# -a should re-run everything, including configuration checks
|
||||
t.run_build_system(["-a"])
|
||||
t.expect_output_lines([
|
||||
" - which one? : pass"])
|
||||
t.expect_touch("bin/$toolset/debug*/pass.obj")
|
||||
t.expect_touch("bin/$toolset/debug*/foo.obj")
|
||||
t.expect_nothing_more()
|
||||
|
||||
# --reconfigure should re-run configuration checks only
|
||||
t.run_build_system(["--reconfigure"])
|
||||
t.expect_output_lines([
|
||||
" - which one? : pass"])
|
||||
t.expect_touch("bin/$toolset/debug*/pass.obj")
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.cleanup()
|
||||
|
||||
test_check_target_builds()
|
||||
test_choose()
|
||||
@@ -9,7 +9,7 @@
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(["-d1"], pass_d0=False, pass_toolset=False)
|
||||
t = BoostBuild.Tester(["-d1"], pass_toolset=False)
|
||||
|
||||
t.write("file.jam", """\
|
||||
prefix = "echo \\"" ;
|
||||
|
||||
@@ -30,7 +30,7 @@ def test_varargs(t, *args, **kwargs):
|
||||
test(t, "varargs", *args, **kwargs)
|
||||
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("echo_args.jam", """\
|
||||
NOCARE all ;
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
import BoostBuild
|
||||
import os
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(["-d1"], pass_toolset=0)
|
||||
|
||||
t.write("subdir1/file-to-bind", "# This file intentionally left blank")
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(["-ffile.jam"], pass_d0=False, pass_toolset=0)
|
||||
t = BoostBuild.Tester(["-ffile.jam"], pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions a { }
|
||||
|
||||
139
test/core_fail_expected.py
Normal file
@@ -0,0 +1,139 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright 2017 Steven Watanabe
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
import BoostBuild
|
||||
|
||||
def test_basic():
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions fail
|
||||
{
|
||||
invalid-dd0eeb5899734622
|
||||
}
|
||||
|
||||
FAIL_EXPECTED t1 ;
|
||||
fail t1 ;
|
||||
|
||||
UPDATE t1 ;
|
||||
""")
|
||||
|
||||
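# The bogus command fails, but FAIL_EXPECTED inverts the result, so no '...failed' line should be reported.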
t.run_build_system(["-ffile.jam"])
|
||||
t.expect_output_lines("...failed*", False)
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.cleanup()
|
||||
|
||||
def test_error():
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions pass
|
||||
{
|
||||
echo okay >$(<)
|
||||
}
|
||||
|
||||
FAIL_EXPECTED t1 ;
|
||||
pass t1 ;
|
||||
|
||||
UPDATE t1 ;
|
||||
""")
|
||||
|
||||
t.run_build_system(["-ffile.jam"], status=1)
|
||||
t.expect_output_lines("...failed pass t1...")
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.cleanup()
|
||||
|
||||
def test_multiple_actions():
|
||||
"""FAIL_EXPECTED targets are considered to pass if the first
|
||||
updating action fails. Further actions will be skipped."""
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions fail
|
||||
{
|
||||
invalid-dd0eeb5899734622
|
||||
}
|
||||
|
||||
actions pass
|
||||
{
|
||||
echo okay >$(<)
|
||||
}
|
||||
|
||||
FAIL_EXPECTED t1 ;
|
||||
fail t1 ;
|
||||
pass t1 ;
|
||||
|
||||
UPDATE t1 ;
|
||||
""")
|
||||
|
||||
t.run_build_system(["-ffile.jam", "-d1"])
|
||||
t.expect_output_lines("...failed*", False)
|
||||
t.expect_output_lines("pass t1", False)
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.cleanup()
|
||||
|
||||
def test_quitquick():
|
||||
"""Tests that FAIL_EXPECTED targets do not cause early exit
|
||||
on failure."""
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions fail
|
||||
{
|
||||
invalid-dd0eeb5899734622
|
||||
}
|
||||
|
||||
actions pass
|
||||
{
|
||||
echo okay >$(<)
|
||||
}
|
||||
|
||||
FAIL_EXPECTED t1 ;
|
||||
fail t1 ;
|
||||
|
||||
pass t2 ;
|
||||
|
||||
UPDATE t1 t2 ;
|
||||
""")
|
||||
|
||||
t.run_build_system(["-ffile.jam", "-q", "-d1"])
|
||||
t.expect_output_lines("pass t2")
|
||||
t.expect_addition("t2")
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.cleanup()
|
||||
|
||||
def test_quitquick_error():
|
||||
"""FAIL_EXPECTED targets should cause early exit if they unexpectedly pass."""
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions pass
|
||||
{
|
||||
echo okay >$(<)
|
||||
}
|
||||
|
||||
FAIL_EXPECTED t1 ;
|
||||
pass t1 ;
|
||||
pass t2 ;
|
||||
|
||||
UPDATE t1 t2 ;
|
||||
""")
|
||||
|
||||
t.run_build_system(["-ffile.jam", "-q", "-d1"], status=1)
|
||||
t.expect_output_lines("pass t2", False)
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.cleanup()
|
||||
|
||||
test_basic()
|
||||
test_error()
|
||||
test_multiple_actions()
|
||||
test_quitquick()
|
||||
test_quitquick_error()
|
||||
@@ -67,8 +67,7 @@ module c
|
||||
IMPORT_MODULE c : ;
|
||||
c.test ;
|
||||
|
||||
actions do-nothing { }
|
||||
do-nothing all ;
|
||||
EXIT : 0 ;
|
||||
""")
|
||||
|
||||
t.run_build_system(["-fcode"], stdout="""\
|
||||
@@ -77,6 +76,7 @@ R2
|
||||
L1
|
||||
A.L1
|
||||
CTEST
|
||||
|
||||
""")
|
||||
|
||||
t.cleanup()
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
import BoostBuild
|
||||
import sys
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=False, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=False)
|
||||
|
||||
t.write("file.jam", """
|
||||
actions run {
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(["-d1"], pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """
|
||||
actions update
|
||||
|
||||
@@ -49,7 +49,7 @@ def test_raw_empty():
|
||||
# get an extra "\r" added in front of it on output.
|
||||
whitespace_out = whitespace_in.replace("\r\n", "\n").replace("\n", "\r\n")
|
||||
|
||||
t = BoostBuild.Tester(["-d2", "-d+4"], pass_d0=False, pass_toolset=0,
|
||||
t = BoostBuild.Tester(["-d2", "-d+4"], pass_toolset=0,
|
||||
use_test_config=False)
|
||||
t.write("file.jam", """\
|
||||
actions do_empty {%s}
|
||||
@@ -67,7 +67,7 @@ do_empty all ;
|
||||
|
||||
|
||||
def test_raw_nt(n=None, error=False):
|
||||
t = BoostBuild.Tester(["-d1", "-d+4"], pass_d0=False, pass_toolset=0,
|
||||
t = BoostBuild.Tester(["-d1", "-d+4"], pass_toolset=0,
|
||||
use_test_config=False)
|
||||
|
||||
cmd_prefix = "%s -c \"print('XXX: " % executable
|
||||
@@ -135,7 +135,7 @@ do_echo all ;
|
||||
|
||||
|
||||
def test_raw_to_shell_fallback_nt():
|
||||
t = BoostBuild.Tester(["-d1", "-d+4"], pass_d0=False, pass_toolset=0,
|
||||
t = BoostBuild.Tester(["-d1", "-d+4"], pass_toolset=0,
|
||||
use_test_config=False)
|
||||
|
||||
cmd_prefix = '%s -c print(' % executable
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions .a.
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions .a.
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(["-d1"], pass_toolset=0)
|
||||
|
||||
t.write("sleep.bat", """\
|
||||
::@timeout /T %1 /NOBREAK >nul
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(["-d1"], pass_toolset=0)
|
||||
|
||||
t.write("sleep.bat", """\
|
||||
::@timeout /T %1 /NOBREAK >nul
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("sleep.bat", """\
|
||||
::@timeout /T %1 /NOBREAK >nul
|
||||
|
||||
@@ -9,7 +9,7 @@ import os
|
||||
|
||||
|
||||
def basic():
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions do-print
|
||||
@@ -39,7 +39,7 @@ updating target1
|
||||
|
||||
|
||||
def ignore_minus_n():
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions do-print
|
||||
@@ -72,7 +72,7 @@ updating target1
|
||||
|
||||
|
||||
def failed_target():
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions fail
|
||||
@@ -120,7 +120,7 @@ do-print target2
|
||||
|
||||
|
||||
def missing_target():
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions do-print
|
||||
@@ -155,7 +155,7 @@ def build_once():
|
||||
effect.
|
||||
|
||||
"""
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions do-print
|
||||
@@ -199,7 +199,7 @@ def return_status():
|
||||
Make sure that UPDATE_NOW returns a failure status if
|
||||
the target failed in a previous call to UPDATE_NOW
|
||||
"""
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("file.jam", """\
|
||||
actions fail
|
||||
@@ -237,7 +237,7 @@ update2:
|
||||
def save_restore():
|
||||
"""Tests that ignore-minus-n and ignore-minus-q are
|
||||
local to the call to UPDATE_NOW"""
|
||||
t = BoostBuild.Tester(pass_toolset=0, pass_d0=False)
|
||||
t = BoostBuild.Tester(pass_toolset=0)
|
||||
|
||||
t.write("actions.jam", """\
|
||||
rule fail
|
||||
|
||||
@@ -61,6 +61,6 @@ t.write("r.rcc", """
|
||||
""")
|
||||
|
||||
t.run_build_system()
|
||||
t.expect_content("bin/$toolset/debug*/r.obj", "rc-object")
|
||||
t.expect_content("bin/r.obj", "rc-object")
|
||||
|
||||
t.cleanup()
|
||||
|
||||
146
test/debugger.py
@@ -97,6 +97,9 @@ Breakpoint 1, f ( ) at test.jam:8
|
||||
""")
|
||||
t.cleanup()
|
||||
|
||||
# Note: step doesn't need to worry about breakpoints,
|
||||
# as it always stops at the next line executed.
|
||||
|
||||
def test_next():
|
||||
t = make_tester()
|
||||
t.write("test.jam", """\
|
||||
@@ -137,6 +140,51 @@ Breakpoint 1, f ( ) at test.jam:7
|
||||
""")
|
||||
t.cleanup()
|
||||
|
||||
def test_next_breakpoint():
|
||||
"""next should stop if it encounters a breakpoint.
|
||||
If the normal end point happens to be a breakpoint,
|
||||
then it should be reported as normal stepping."""
|
||||
t = make_tester()
|
||||
t.write("test.jam", """\
|
||||
rule f ( recurse ? )
|
||||
{
|
||||
if $(recurse) { f ; }
|
||||
a = 1 ;
|
||||
}
|
||||
rule g ( )
|
||||
{
|
||||
b = 2 ;
|
||||
}
|
||||
f true ;
|
||||
g ;
|
||||
""")
|
||||
run(t, """\
|
||||
(b2db) break f
|
||||
Breakpoint 1 set at f
|
||||
(b2db) break g
|
||||
Breakpoint 2 set at g
|
||||
(b2db) break test.jam:4
|
||||
Breakpoint 3 set at test.jam:4
|
||||
(b2db) run -ftest.jam
|
||||
Starting program: {{bjam}} -ftest.jam
|
||||
Breakpoint 1, f ( true ) at test.jam:3
|
||||
3 if $(recurse) { f ; }
|
||||
(b2db) next
|
||||
Breakpoint 1, f ( ) at test.jam:3
|
||||
3 if $(recurse) { f ; }
|
||||
(b2db) next
|
||||
4 a = 1 ;
|
||||
(b2db) next
|
||||
4 a = 1 ;
|
||||
(b2db) next
|
||||
11 g ;
|
||||
(b2db) next
|
||||
Breakpoint 2, g ( ) at test.jam:8
|
||||
8 b = 2 ;
|
||||
(b2db) quit
|
||||
""")
|
||||
t.cleanup()
|
||||
|
||||
def test_finish():
|
||||
t = make_tester()
|
||||
t.write("test.jam", """\
|
||||
@@ -178,7 +226,100 @@ Breakpoint 1, f ( ) at test.jam:3
|
||||
(b2db) quit
|
||||
""")
|
||||
t.cleanup()
|
||||
|
||||
|
||||
def test_finish_breakpoints():
|
||||
"""finish should stop when it reaches a breakpoint."""
|
||||
t = make_tester()
|
||||
t.write("test.jam", """\
|
||||
rule f ( recurse * )
|
||||
{
|
||||
if $(recurse)
|
||||
{
|
||||
a = [ f $(recurse[2-]) ] ;
|
||||
}
|
||||
}
|
||||
rule g ( list * )
|
||||
{
|
||||
for local v in $(list)
|
||||
{
|
||||
x = $(v) ;
|
||||
}
|
||||
}
|
||||
f 1 2 ;
|
||||
g 1 2 ;
|
||||
""")
|
||||
run(t, """\
|
||||
(b2db) break test.jam:5
|
||||
Breakpoint 1 set at test.jam:5
|
||||
(b2db) break test.jam:12
|
||||
Breakpoint 2 set at test.jam:12
|
||||
(b2db) run -ftest.jam
|
||||
Starting program: {{bjam}} -ftest.jam
|
||||
Breakpoint 1, f ( 1 2 ) at test.jam:5
|
||||
5 a = [ f $(recurse[2-]) ] ;
|
||||
(b2db) finish
|
||||
Breakpoint 1, f ( 2 ) at test.jam:5
|
||||
5 a = [ f $(recurse[2-]) ] ;
|
||||
(b2db) finish
|
||||
5 a = [ f $(recurse[2-]) ] ;
|
||||
(b2db) finish
|
||||
16 g 1 2 ;
|
||||
(b2db) finish
|
||||
Breakpoint 2, g ( 1 2 ) at test.jam:12
|
||||
12 x = $(v) ;
|
||||
(b2db) finish
|
||||
Breakpoint 2, g ( 1 2 ) at test.jam:12
|
||||
12 x = $(v) ;
|
||||
(b2db) quit
|
||||
""")
|
||||
t.cleanup()
|
||||
|
||||
def test_continue_breakpoints():
|
||||
"""continue should stop when it reaches a breakpoint"""
|
||||
t = make_tester()
|
||||
t.write("test.jam", """\
|
||||
rule f ( recurse * )
|
||||
{
|
||||
if $(recurse)
|
||||
{
|
||||
a = [ f $(recurse[2-]) ] ;
|
||||
}
|
||||
}
|
||||
rule g ( list * )
|
||||
{
|
||||
for local v in $(list)
|
||||
{
|
||||
x = $(v) ;
|
||||
}
|
||||
}
|
||||
f 1 2 ;
|
||||
g 1 2 ;
|
||||
""")
|
||||
run(t, """\
|
||||
(b2db) break test.jam:5
|
||||
Breakpoint 1 set at test.jam:5
|
||||
(b2db) break test.jam:12
|
||||
Breakpoint 2 set at test.jam:12
|
||||
(b2db) run -ftest.jam
|
||||
Starting program: {{bjam}} -ftest.jam
|
||||
Breakpoint 1, f ( 1 2 ) at test.jam:5
|
||||
5 a = [ f $(recurse[2-]) ] ;
|
||||
(b2db) continue
|
||||
Breakpoint 1, f ( 2 ) at test.jam:5
|
||||
5 a = [ f $(recurse[2-]) ] ;
|
||||
(b2db) continue
|
||||
Breakpoint 1, f ( 1 2 ) at test.jam:5
|
||||
5 a = [ f $(recurse[2-]) ] ;
|
||||
(b2db) continue
|
||||
Breakpoint 2, g ( 1 2 ) at test.jam:12
|
||||
12 x = $(v) ;
|
||||
(b2db) continue
|
||||
Breakpoint 2, g ( 1 2 ) at test.jam:12
|
||||
12 x = $(v) ;
|
||||
(b2db) quit
|
||||
""")
|
||||
t.cleanup()
|
||||
|
||||
def test_breakpoints():
|
||||
"""Tests the interaction between the following commands:
|
||||
break, clear, delete, disable, enable"""
|
||||
@@ -519,7 +660,10 @@ test_run()
|
||||
test_exit_status()
|
||||
test_step()
|
||||
test_next()
|
||||
test_next_breakpoint()
|
||||
test_finish()
|
||||
test_finish_breakpoints()
|
||||
test_continue_breakpoints()
|
||||
test_breakpoints()
|
||||
test_breakpoints_running()
|
||||
test_backtrace()
|
||||
|
||||
@@ -10,7 +10,7 @@ import BoostBuild
|
||||
|
||||
|
||||
def test_basic():
|
||||
t = BoostBuild.Tester(["-d3", "-d+12"], pass_d0=False, use_test_config=False)
|
||||
t = BoostBuild.Tester(["-d3", "-d+12"], use_test_config=False)
|
||||
|
||||
t.write("a.cpp", """
|
||||
#include <a.h>
|
||||
@@ -211,7 +211,7 @@ def test_scanned_includes_with_absolute_paths():
|
||||
considered when scanning dependencies.
|
||||
|
||||
"""
|
||||
t = BoostBuild.Tester(["-d3", "-d+12"], pass_d0=False)
|
||||
t = BoostBuild.Tester(["-d3", "-d+12"])
|
||||
|
||||
t.write("jamroot.jam", """\
|
||||
path-constant TOP : . ;
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(use_test_config=False)
|
||||
|
||||
# First check some startup.
|
||||
|
||||
t.write("jamroot.jam", "")
|
||||
t.write("jamfile.jam", """\
|
||||
exe a : a.cpp b ;
|
||||
lib b : b.cpp ;
|
||||
""")
|
||||
|
||||
t.write("a.cpp", """\
|
||||
void
|
||||
# ifdef _WIN32
|
||||
__declspec(dllimport)
|
||||
# endif
|
||||
foo();
|
||||
int main() { foo(); }
|
||||
""")
|
||||
|
||||
t.write("b.cpp", """\
|
||||
#ifdef MACROS
|
||||
void
|
||||
# ifdef _WIN32
|
||||
__declspec(dllexport)
|
||||
# endif
|
||||
foo() {}
|
||||
#endif
|
||||
|
||||
# ifdef _WIN32
|
||||
int __declspec(dllexport) force_implib_creation;
|
||||
# endif
|
||||
""")
|
||||
|
||||
t.run_build_system(["define=MACROS"])
|
||||
t.expect_addition("bin/$toolset/debug*/"
|
||||
* (BoostBuild.List("a.obj b.obj b.dll a.exe")))
|
||||
|
||||
|
||||
# When building a debug version, the 'define' still applies.
|
||||
t.rm("bin")
|
||||
t.run_build_system(["debug", "define=MACROS"])
|
||||
t.expect_addition("bin/$toolset/debug*/"
|
||||
* (BoostBuild.List("a.obj b.obj b.dll a.exe")))
|
||||
|
||||
|
||||
# When building a release version, the 'define' still applies.
|
||||
t.write("jamfile.jam", """\
|
||||
exe a : a.cpp b : <variant>debug ;
|
||||
lib b : b.cpp ;
|
||||
""")
|
||||
t.rm("bin")
|
||||
t.run_build_system(["release", "define=MACROS"])
|
||||
|
||||
|
||||
# Regression test: direct build request was not working when there was more
|
||||
# than one level of 'build-project'.
|
||||
t.rm(".")
|
||||
t.write("jamroot.jam", "")
|
||||
t.write("jamfile.jam", "build-project a ;")
|
||||
t.write("a/jamfile.jam", "build-project b ;")
|
||||
t.write("a/b/jamfile.jam", "")
|
||||
t.run_build_system(["release"])
|
||||
|
||||
t.cleanup()
|
||||
@@ -143,4 +143,21 @@ es2 = t.adjust_name("b/bin/$toolset/debug*")
|
||||
t.expect_content_lines("bin/$toolset/debug*/mp.pathlist", "*" + es1);
|
||||
t.expect_content_lines("bin/$toolset/debug*/mp.pathlist", "*" + es2);
|
||||
|
||||
t.rm("bin/$toolset/debug*/mp.pathlist")
|
||||
|
||||
# Now run the same checks with pre-built libraries
|
||||
adll = t.glob_file("a/bin/$toolset/debug*/a.dll")
|
||||
bdll = t.glob_file("b/bin/$toolset/debug*/b.dll")
|
||||
t.write("b/jamfile.jam", """
|
||||
local bdll = %s ;
|
||||
# Make sure that it is found even with multiple source-locations
|
||||
project : source-location c $(bdll:D) ;
|
||||
lib b : ../a//a : <file>$(bdll:D=) ;
|
||||
""" % bdll.replace("\\", "\\\\"))
|
||||
t.run_build_system(["hardcode-dll-paths=true"])
|
||||
t.expect_addition("bin/$toolset/debug*/mp.pathlist")
|
||||
|
||||
t.expect_content_lines("bin/$toolset/debug*/mp.pathlist", "*" + es1)
|
||||
t.expect_content_lines("bin/$toolset/debug*/mp.pathlist", "*" + es2)
|
||||
|
||||
t.cleanup()
|
||||
|
||||
@@ -13,5 +13,5 @@ import sys
|
||||
t = BoostBuild.Tester(['example.python.interpreter=%s' % sys.executable])
|
||||
t.set_tree("../example/make")
|
||||
t.run_build_system()
|
||||
t.expect_addition(["bin/$toolset/debug*/main.cpp"])
|
||||
t.expect_addition(["bin/main.cpp"])
|
||||
t.cleanup()
|
||||
|
||||
@@ -16,7 +16,7 @@ int main() {}
|
||||
""")
|
||||
|
||||
t.write("b.cpp", """
|
||||
#ifdef CF_1
|
||||
#if defined(CF_1) && !defined(CF_IS_OFF)
|
||||
int main() {}
|
||||
#endif
|
||||
""")
|
||||
@@ -27,6 +27,36 @@ int main() {}
|
||||
#endif
|
||||
""")
|
||||
|
||||
t.write("d.cpp", """
|
||||
#ifndef CF_IS_OFF
|
||||
int main() {}
|
||||
#endif
|
||||
""")
|
||||
|
||||
t.write("e.cpp", """
|
||||
#if !defined(CF_IS_OFF) && defined(CF_2) && !defined(CF_1)
|
||||
int main() {}
|
||||
#endif
|
||||
""")
|
||||
|
||||
t.write("f.cpp", """
|
||||
#if defined(CF_1)
|
||||
int main() {}
|
||||
#endif
|
||||
""")
|
||||
|
||||
t.write("g.cpp", """
|
||||
#if defined(FOPT_2)
|
||||
int main() {}
|
||||
#endif
|
||||
""")
|
||||
|
||||
t.write("h.cpp", """
|
||||
#if defined(CX_2)
|
||||
int main() {}
|
||||
#endif
|
||||
""")
|
||||
|
||||
t.write("jamfile.jam", """
|
||||
# See if the default value of the composite feature 'cf' will be expanded to
|
||||
# <define>CF_IS_OFF.
|
||||
@@ -38,14 +68,45 @@ exe b : b.cpp : <cf>on-1 ;
|
||||
# See if conditional requirements are recursively expanded.
|
||||
exe c : c.cpp : <toolset>$toolset:<variant>release <variant>release:<define>FOO
|
||||
;
|
||||
|
||||
# Composites specified in the default build should not
|
||||
# be expanded if they are overridden in the requirements.
|
||||
exe d : d.cpp : <cf>on : <cf>off ;
|
||||
|
||||
# Overriding a feature should clear subfeatures and
|
||||
# apply default values of subfeatures.
|
||||
exe e : e.cpp : <cf>always : <cf>on-1 ;
|
||||
|
||||
# Subfeatures should not be changed if the parent feature doesn't change
|
||||
exe f : f.cpp : <cf>on : <cf>on-1 ;
|
||||
|
||||
# If a subfeature is not specific to the value of the parent feature,
|
||||
# then changing the parent value should not clear the subfeature.
|
||||
exe g : g.cpp : <fopt>off : <fopt>on-2 ;
|
||||
|
||||
# If the default value of a composite feature adds an optional
|
||||
# feature which has a subfeature with a default, then that
|
||||
# default should be added.
|
||||
exe h : h.cpp ;
|
||||
""")
|
||||
|
||||
t.write("jamroot.jam", """
|
||||
import feature ;
|
||||
feature.feature cf : off on : composite incidental ;
|
||||
feature.feature cf : off on always : composite incidental ;
|
||||
feature.compose <cf>off : <define>CF_IS_OFF ;
|
||||
feature.subfeature cf on : version : 1 2 : composite optional incidental ;
|
||||
feature.compose <cf-on:version>1 : <define>CF_1 ;
|
||||
feature.subfeature cf always : version : 1 2 : composite incidental ;
|
||||
feature.compose <cf-always:version>1 : <define>CF_2 ;
|
||||
feature.feature fopt : on off : optional incidental ;
|
||||
feature.subfeature fopt : version : 1 2 : composite incidental ;
|
||||
feature.compose <fopt-version>2 : <define>FOPT_2 ;
|
||||
|
||||
feature.feature cx1 : on : composite incidental ;
|
||||
feature.feature cx2 : on : optional incidental ;
|
||||
feature.subfeature cx2 on : sub : 1 : composite incidental ;
|
||||
feature.compose <cx1>on : <cx2>on ;
|
||||
feature.compose <cx2-on:sub>1 : <define>CX_2 ;
|
||||
""")
|
||||
|
||||
t.expand_toolset("jamfile.jam")
|
||||
@@ -53,7 +114,12 @@ t.expand_toolset("jamfile.jam")
|
||||
t.run_build_system()
|
||||
t.expect_addition(["bin/$toolset/debug*/a.exe",
|
||||
"bin/$toolset/debug*/b.exe",
|
||||
"bin/$toolset/release*/c.exe"])
|
||||
"bin/$toolset/release*/c.exe",
|
||||
"bin/$toolset/debug*/d.exe",
|
||||
"bin/$toolset/debug*/e.exe",
|
||||
"bin/$toolset/debug*/f.exe",
|
||||
"bin/$toolset/debug*/g.exe",
|
||||
"bin/$toolset/debug*/h.exe"])
|
||||
|
||||
t.rm("bin")
|
||||
|
||||
|
||||
142
test/feature_relevant.py
Normal file
@@ -0,0 +1,142 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright 2018 Steven Watanabe
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# Tests the <relevant> feature
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(use_test_config=False)
|
||||
|
||||
t.write("xxx.jam", """
|
||||
import type ;
|
||||
import feature : feature ;
|
||||
import toolset : flags ;
|
||||
import generators ;
|
||||
type.register XXX : xxx ;
|
||||
type.register YYY : yyy ;
|
||||
feature xxxflags : : free ;
|
||||
generators.register-standard xxx.run : YYY : XXX ;
|
||||
# xxxflags is relevant because it is used by flags
|
||||
flags xxx.run OPTIONS : <xxxflags> ;
|
||||
actions run
|
||||
{
|
||||
echo okay > $(<)
|
||||
}
|
||||
""")
|
||||
|
||||
t.write("zzz.jam", """
|
||||
import xxx ;
|
||||
import type ;
|
||||
import feature : feature ;
|
||||
import generators ;
|
||||
type.register ZZZ : zzz ;
|
||||
feature zzz.enabled : off on : propagated ;
|
||||
# zzz.enabled is relevant because it is used in the generator's
|
||||
# requirements
|
||||
generators.register-standard zzz.run : XXX : ZZZ : <zzz.enabled>on ;
|
||||
actions run
|
||||
{
|
||||
echo okay > $(<)
|
||||
}
|
||||
""")
|
||||
|
||||
t.write("aaa.jam", """
|
||||
import zzz ;
|
||||
import type ;
|
||||
import feature : feature ;
|
||||
import generators ;
|
||||
import toolset : flags ;
|
||||
type.register AAA : aaa ;
|
||||
feature aaaflags : : free ;
|
||||
generators.register-standard aaa.run : ZZZ : AAA ;
|
||||
flags aaa.run OPTIONS : <aaaflags> ;
|
||||
actions run
|
||||
{
|
||||
echo okay > $(<)
|
||||
}
|
||||
""")
|
||||
|
||||
t.write("Jamroot.jam", """
|
||||
import xxx ;
|
||||
import zzz ;
|
||||
import aaa ;
|
||||
import feature : feature ;
|
||||
|
||||
# f1 is relevant, because it is composite and <xxxflags> is relevant
|
||||
feature f1 : n y : composite propagated ;
|
||||
feature.compose <f1>y : <xxxflags>-no1 ;
|
||||
# f2 is relevant, because it is used in a conditional
|
||||
feature f2 : n y : propagated ;
|
||||
# f3 is relevant, because it is used to choose the target alternative
|
||||
feature f3 : n y : propagated ;
|
||||
# f4 is relevant, because it is marked as such explicitly
|
||||
feature f4 : n y : propagated ;
|
||||
# f5 is relevant because of the conditional usage-requirements
|
||||
feature f5 : n y : propagated ;
|
||||
# f6 is relevant because the indirect conditional indicates so
|
||||
feature f6 : n y : propagated ;
|
||||
# f7 is relevant because the icond7 rule says so
|
||||
feature f7 : n y : propagated ;
|
||||
|
||||
# The same as f[n], except not propagated
|
||||
feature g1 : n y : composite ;
|
||||
feature.compose <g1>y : <xxxflags>-no1 ;
|
||||
feature g2 : n y ;
|
||||
feature g3 : n y ;
|
||||
feature g4 : n y ;
|
||||
feature g5 : n y ;
|
||||
feature g6 : n y ;
|
||||
feature g7 : n y ;
|
||||
|
||||
project : default-build
|
||||
<f1>y <f2>y <f3>y <f4>y <f5>y <f6>y <f7>y
|
||||
<g1>y <g2>y <g3>y <g4>y <g5>y <g6>y <g7>y <zzz.enabled>on ;
|
||||
|
||||
rule icond6 ( properties * )
|
||||
{
|
||||
local result ;
|
||||
if <f6>y in $(properties) || <g6>y in $(properties)
|
||||
{
|
||||
result += <xxxflags>-yes6 ;
|
||||
}
|
||||
return $(result)
|
||||
<relevant>xxxflags:<relevant>f6
|
||||
<relevant>xxxflags:<relevant>g6 ;
|
||||
}
|
||||
|
||||
rule icond7 ( properties * )
|
||||
{
|
||||
local result ;
|
||||
if <f7>y in $(properties) || <g7>y in $(properties)
|
||||
{
|
||||
result += <aaaflags>-yes7 ;
|
||||
}
|
||||
return $(result)
|
||||
<relevant>aaaflags:<relevant>f7
|
||||
<relevant>aaaflags:<relevant>g7 ;
|
||||
}
|
||||
|
||||
zzz out : in.yyy
|
||||
: <f2>y:<xxxflags>-no2 <g2>y:<xxxflags>-no2 <relevant>f4 <relevant>g4
|
||||
<conditional>@icond6
|
||||
:
|
||||
: <f5>y:<aaaflags>-yes5 <g5>y:<aaaflags>-yes5 <conditional>@icond7
|
||||
;
|
||||
alias out : : <f3>n ;
|
||||
alias out : : <g3>n ;
|
||||
# Features that are relevant for out are also relevant for check-propagate
|
||||
aaa check-propagate : out ;
|
||||
""")
|
||||
|
||||
t.write("in.yyy", "")
|
||||
|
||||
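# Relevant features appear in each target's path; only the propagated f* features (and zzz.enabled) carry over to check-propagate.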
t.run_build_system()
|
||||
t.expect_addition("bin/f1-y/f2-y/f3-y/f4-y/f6-y/g1-y/g2-y/g3-y/g4-y/g6-y/out.xxx")
|
||||
t.expect_addition("bin/f1-y/f2-y/f3-y/f4-y/f6-y/g1-y/g2-y/g3-y/g4-y/g6-y/zzz.enabled-on/out.zzz")
|
||||
t.expect_addition("bin/f1-y/f2-y/f3-y/f4-y/f5-y/f6-y/f7-y/zzz.enabled-on/check-propagate.aaa")
|
||||
|
||||
t.cleanup()
|
||||
@@ -1,42 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) Vladimir Prus 2007.
|
||||
# Distributed under the Boost Software License, Version 1.0. (See
|
||||
# accompanying file LICENSE_1_0.txt or copy at
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# Tests that a free feature specified on the command line applies to all
|
||||
# targets ever built.
|
||||
|
||||
import BoostBuild
|
||||
|
||||
t = BoostBuild.Tester(use_test_config=False)
|
||||
|
||||
t.write("jamroot.jam", """\
|
||||
exe hello : hello.cpp foo ;
|
||||
lib foo : foo.cpp ;
|
||||
""")
|
||||
|
||||
t.write("hello.cpp", """\
|
||||
extern void foo();
|
||||
#ifdef FOO
|
||||
int main() { foo(); }
|
||||
#endif
|
||||
""")
|
||||
|
||||
t.write("foo.cpp", """\
|
||||
#ifdef FOO
|
||||
#ifdef _WIN32
|
||||
__declspec(dllexport)
|
||||
#endif
|
||||
void foo() {}
|
||||
#endif
|
||||
""")
|
||||
|
||||
# If FOO is not defined when compiling the 'foo' target, we will get a link
|
||||
# error at this point.
|
||||
t.run_build_system(["hello", "define=FOO"])
|
||||
|
||||
t.expect_addition("bin/$toolset/debug*/hello.exe")
|
||||
|
||||
t.cleanup()
|
||||
@@ -20,9 +20,8 @@ t.expect_output_lines("warning: On gcc, DLLs can not be built with "
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.run_build_system(["link=static", "runtime-link=static"])
|
||||
binFolder = "bin/$toolset/debug*/link-static/runtime-link-static"
|
||||
t.expect_addition("%s/hello.obj" % binFolder)
|
||||
t.expect_addition("%s/hello.lib" % binFolder)
|
||||
t.expect_addition("bin/$toolset/debug*/link-static/hello.obj")
|
||||
t.expect_addition("bin/$toolset/debug*/link-static/hello.lib")
|
||||
t.expect_nothing_more()
|
||||
|
||||
t.cleanup()
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.