# Copyright 2002, 2003, 2004, 2005 Dave Abrahams
# Copyright 2002, 2005, 2006 Rene Rivera
# Copyright 2006 Juergen Hunold
# Copyright 2005 Toon Knapen
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Defines standard features and rules.
import "class" : new ;
import feature : feature compose ;
import toolset : flags ;
import errors : error ;
import type ;
import scanner ;
import generators ;
import regex ;
import virtual-target ;
import os ;
import symlink ;
import alias ;
import property ;
import print ;
import utility ;
import project ;
# This feature is used to determine which OS we're on.
# In the future, this may become <target-os> and <host-os>.
# The future is now...
local os = [ modules.peek : OS ] ;
feature os : $(os) : propagated link-incompatible ;
# Translates the OS name reported by bjam into the OS tags used
# by the host-os and target-os features, i.e. it returns the
# OS we are currently running on.
local rule default-host-os ( )
{
local host-os ;
local os-list = [ feature.values host-os ] ;
if [ os.name ] in $(os-list:U)
{
host-os = [ os.name ] ;
}
else
{
switch [ os.name ]
{
case NT : host-os = windows ;
case AS400 : host-os = unix ;
case MINGW : host-os = windows ;
case BSDI : host-os = bsd ;
case COHERENT : host-os = unix ;
case DRAGONFLYBSD : host-os = bsd ;
case IRIX : host-os = sgi ;
case MACOSX : host-os = darwin ;
case KFREEBSD : host-os = freebsd ;
case LINUX : host-os = linux ;
case * : host-os = unix ;
}
}
return $(host-os:L) ;
}
# The two OS features define a known set of abstract OS
# names. The host-os is the OS under which bjam is running.
# Even though this should really be a fixed property, we need
# to list all the values to prevent unknown value errors.
# Both set the default value to the current OS to account for
# the default use case of building on the target OS.
feature host-os
: amiga aix bsd cygwin darwin dos emx freebsd hpux
linux netbsd openbsd osf qnx qnxnto sgi solaris sun sunos
svr4 sysv ultrix unix unixware vms windows
: optional ;
feature.set-default host-os : [ default-host-os ] ;
feature target-os
: amiga aix bsd cygwin darwin dos emx freebsd hpux
linux netbsd openbsd osf qnx qnxnto sgi solaris sun sunos
svr4 sysv ultrix unix unixware vms windows
: propagated link-incompatible ;
feature.set-default target-os : [ default-host-os ] ;
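# For illustration only (the target and define are hypothetical): projects
# can condition requirements on the target OS with a conditional property,
# for example:
#
#   exe app : app.cpp : <target-os>windows:<define>APP_USE_WINSOCK ;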
feature toolset : : implicit propagated symmetric ;
feature stdlib : native : propagated composite ;
feature link : shared static : propagated ;
feature runtime-link : shared static : propagated ;
feature runtime-debugging : on off : propagated ;
feature optimization : off speed space : propagated ;
feature profiling : off on : propagated ;
feature inlining : off on full : propagated ;
feature threading : single multi : propagated ;
feature rtti : on off : propagated ;
feature exception-handling : on off : propagated ;
# Whether there is support for asynchronous EH (e.g. catching SEGVs)
feature asynch-exceptions : off on : propagated ;
# Whether all extern "C" functions are considered nothrow by default
feature extern-c-nothrow : off on : propagated ;
feature debug-symbols : on off : propagated ;
feature define : : free ;
feature undef : : free ;
feature "include" : : free path ; #order-sensitive ;
feature cflags : : free ;
feature cxxflags : : free ;
feature fflags : : free ;
feature asmflags : : free ;
feature linkflags : : free ;
feature archiveflags : : free ;
feature version : : free ;
feature.feature location-prefix : : free ;
# The following features are incidental, since
# in themselves they have no effect on build products.
# Not making them incidental will result in problems in corner
# cases, for example:
#
# unit-test a : a.cpp : <use>b ;
# lib b : a.cpp b ;
#
# Here, if <use> is not incidental, we'll decide we have two
# targets for a.obj with different properties, and will complain.
#
# Note that making a feature incidental does not mean it's ignored. It may
# be ignored when creating the virtual target, but the rest of the build
# process will still use it.
feature use : : free dependency incidental ;
feature dependency : : free dependency incidental ;
feature implicit-dependency : : free dependency incidental ;
feature warnings :
on # enable default/"reasonable" warning level for the tool
all # enable all possible warnings issued by the tool
off # disable all warnings issued by the tool
: incidental propagated ;
feature warnings-as-errors :
off # do not fail the compilation if there are warnings
on # fail the compilation if there are warnings
: incidental propagated ;
feature source : : free dependency incidental ;
feature library : : free dependency incidental ;
feature file : : free dependency incidental ;
feature find-shared-library : : free ; #order-sensitive ;
feature find-static-library : : free ; #order-sensitive ;
feature library-path : : free path ; #order-sensitive ;
# Internal feature.
feature library-file : : free dependency ;
feature name : : free ;
feature tag : : free ;
feature search : : free path ; #order-sensitive ;
feature location : : free path ;
feature dll-path : : free path ;
feature hardcode-dll-paths : true false : incidental ;
# This is an internal feature which holds the paths of all dependency
# dynamic libraries. On Windows, it's needed so that we can add all
# those paths to PATH when running applications.
# On Linux, it's needed to add proper -rpath-link command line options.
feature xdll-path : : free path ;
# Provides a means to specify a .def file for Windows DLLs.
feature def-file : : free dependency ;
# This is an internal feature which is used to store the name of the
# bjam action to call when building a target.
feature.feature action : : free ;
# This feature is used to allow specific generators to run.
# For example, QT tools can only be invoked when the QT library
# is used. In that case, <allow>qt will be in the usage requirements
# of the library.
feature allow : : free ;
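# For illustration only (the library name and file are hypothetical): a
# library could export <allow>qt in its usage requirements, and QT-specific
# generators would list <allow>qt in their requirements so they only run
# for targets that use that library:
#
#   lib qt : : <file>/usr/lib/libqt.so : : <allow>qt ;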
# The addressing model to generate code for.
# Currently a limited set only specifying the bit size of pointers.
feature address-model : 16 32 64
: propagated optional ;
# Type of CPU architecture to compile for.
feature architecture :
# x86 and x86-64
x86
# ia64
ia64
# Sparc
sparc
# RS/6000 & PowerPC
power
# MIPS/SGI
mips1 mips2 mips3 mips4 mips32 mips32r2 mips64
#
: propagated optional ;
# The specific instruction set within an architecture to compile for.
feature instruction-set :
# x86 and x86-64
i386 i486 i586 i686
pentium pentium-mmx pentiumpro pentium2 pentium3 pentium3m pentium-m pentium4 pentium4m
prescott nocona
k6 k6-2 k6-3 athlon athlon-tbird athlon-4 athlon-xp athlon-mp
k8 opteron athlon64 athlon-fx
winchip-c6 winchip2
c3 c3-2
# ia64
itanium itanium1 merced itanium2 mckinley
# Sparc
v7 cypress v8 supersparc sparclite hypersparc sparclite86x
f930 f934 sparclet tsc701 v9 ultrasparc
# RS/6000 & PowerPC
401 403 405 405fp 440 440fp 505
601 602 603 603e 604 604e 620 630 740 7400 7450 750
801 821 823 860 970 8540
power-common ec603e g3 g4 g5
power power2 power3 power4 power5 powerpc powerpc64
rios rios1 rsc rios2 rs64a
# MIPS
4kc 4kp 5kc 20kc m4k
r2000 r3000 r3900 r4000 r4100 r4300 r4400 r4600 r4650 r6000 r8000
rm7000 rm9000 orion sb1
vr4100 vr4111 vr4120 vr4130 vr4300 vr5000 vr5400 vr5500
#
: propagated optional ;
# Used to select a specific variant of the C++ ABI if the compiler
# supports several.
feature c++abi : : propagated optional ;
feature conditional : : incidental free ;
# The value of 'no' prevents building of a target.
feature build : yes no : optional ;
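# For illustration only (the target name and condition are hypothetical):
# a conditional requirement can switch a target off for configurations
# where it cannot be built, for example:
#
#   exe win-only : win_only.cpp : <target-os>linux:<build>no ;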
# Windows-specific features
feature user-interface : console gui wince native auto ;
feature variant : : implicit composite propagated symmetric ;
# Declares a new variant.
# First determines the explicit properties for this variant, by
# refining the parents' explicit properties with the passed explicit
# properties. The result is remembered and will be used if
# this variant is used as a parent.
#
# Second, determines the full property set for this variant by
# adding to the explicit properties default values for all properties
# which are neither present nor symmetric.
#
# Lastly, makes the appropriate value of the 'variant' property expand
# to the full property set.
rule variant ( name # Name of the variant
: parents-or-properties * # Specifies parent variants, if
# 'explicit-properties' are given,
# and explicit-properties otherwise.
: explicit-properties * # Explicit properties.
)
{
local parents ;
if ! $(explicit-properties)
{
if $(parents-or-properties[1]:G)
{
explicit-properties = $(parents-or-properties) ;
}
else
{
parents = $(parents-or-properties) ;
}
}
else
{
parents = $(parents-or-properties) ;
}
# The problem is that we have to check for conflicts
# between base variants.
if $(parents[2])
{
error "multiple base variants are not yet supported" ;
}
local inherited ;
# Add explicitly specified properties for parents
for local p in $(parents)
{
# TODO: the check may be stricter
if ! [ feature.is-implicit-value $(p) ]
{
error "Invalid base varaint" $(p) ;
}
inherited += $(.explicit-properties.$(p)) ;
}
property.validate $(explicit-properties) ;
explicit-properties = [ property.refine $(inherited) : $(explicit-properties) ] ;
# Record the explicitly specified properties for this variant.
# We do this after inheriting the parents' properties, so that
# they affect other variants derived from this one.
.explicit-properties.$(name) = $(explicit-properties) ;
feature.extend variant : $(name) ;
feature.compose <variant>$(name) : $(explicit-properties) ;
}
IMPORT $(__name__) : variant : : variant ;
variant debug : <optimization>off <debug-symbols>on <inlining>off <runtime-debugging>on ;
variant release : <optimization>speed <debug-symbols>off <inlining>full
<runtime-debugging>off <define>NDEBUG ;
variant profile : release : <profiling>on <debug-symbols>on ;
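# Additional variants can be declared the same way; for example, a
# hypothetical variant that keeps release optimization but adds symbols:
#
#   variant release-symbols : release : <debug-symbols>on ;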
class searched-lib-target : abstract-file-target
{
rule __init__ ( name
: project
: shared ?
: real-name ?
: search *
: action
)
{
abstract-file-target.__init__ $(name) : SEARCHED_LIB : $(project)
: $(action) ;
self.shared = $(shared) ;
self.real-name = $(real-name) ;
self.real-name ?= $(name) ;
self.search = $(search) ;
}
rule shared ( )
{
return $(self.shared) ;
}
rule real-name ( )
{
return $(self.real-name) ;
}
rule search ( )
{
return $(self.search) ;
}
rule actualize-location ( target )
{
NOTFILE $(target) ;
}
rule path ( )
{
}
}
import types/register ;
import stage ;
class c-scanner : scanner
{
import regex virtual-target path scanner ;
rule __init__ ( includes * )
{
scanner.__init__ ;
for local i in $(includes)
{
self.includes += [ path.native $(i:G=) ] ;
}
}
rule pattern ( )
{
return "#[ \t]*include[ ]*(<(.*)>|\"(.*)\")" ;
}
rule process ( target : matches * : binding )
{
local angle = [ regex.transform $(matches) : "<(.*)>" ] ;
local quoted = [ regex.transform $(matches) : "\"(.*)\"" ] ;
# CONSIDER: the new scoping rule seem to defeat "on target" variables.
local g = [ on $(target) return $(HDRGRIST) ] ;
local b = [ NORMALIZE_PATH $(binding:D) ] ;
# Attach the binding of the including file to the included targets.
# When a target is directly created from a virtual target,
# this extra information is unnecessary. But in other
# cases, it allows us to distinguish between two headers of the
# same name included from different places.
# We don't need this extra information for angle includes,
# since they should not depend on the including file (we can't
# get a literal "." in the include path).
local g2 = $(g)"#"$(b) ;
angle = $(angle:G=$(g)) ;
quoted = $(quoted:G=$(g2)) ;
local all = $(angle) $(quoted) ;
INCLUDES $(target) : $(all) ;
NOCARE $(all) ;
SEARCH on $(angle) = $(self.includes:G=) ;
SEARCH on $(quoted) = $(b) $(self.includes:G=) ;
# Just propagate the current scanner to the includes, in the hope
# that the includes do not change scanners.
scanner.propagate $(__name__) : $(angle) $(quoted) : $(target) ;
}
}
scanner.register c-scanner : include ;
type.set-scanner CPP : c-scanner ;
type.register H : h ;
type.register HPP : hpp : H ;
type.register C : c ;
type.set-scanner C : c-scanner ;
# The generator class for libraries (target type LIB). Depending on properties it will
# request building of the appropriate specific type -- SHARED_LIB, STATIC_LIB or
# SEARCHED_LIB.
class lib-generator : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
rule run ( project name ? : property-set : sources * )
{
# The lib generator is composing, and can only be invoked with an
# explicit name. This check is present in generator.run (and so in
# builtin.linking-generator), but we duplicate it here to avoid doing
# extra work.
if $(name)
{
local properties = [ $(property-set).raw ] ;
# Determine the needed target type
local actual-type ;
# <source> files can be generated by the <conditional>@rule feature,
# in which case we don't consider the target to be of SEARCHED_LIB type.
if ! <source> in $(properties:G) &&
( <search> in $(properties:G) || <name> in $(properties:G) )
{
actual-type = SEARCHED_LIB ;
}
else if <file> in $(properties:G)
{
# A target with <file> is handled by the prebuilt-lib-generator,
# which is registered for the LIB type below.
actual-type = LIB ;
}
else if <link>shared in $(properties)
{
actual-type = SHARED_LIB ;
}
else
{
actual-type = STATIC_LIB ;
}
property-set = [ $(property-set).add-raw <main-target-type>LIB ] ;
# Construct the target.
return [ generators.construct $(project) $(name) : $(actual-type)
: $(property-set) : $(sources) ] ;
}
}
rule viable-source-types ( )
{
return * ;
}
}
generators.register [ new lib-generator builtin.lib-generator : : LIB ] ;
# The implementation of the 'lib' rule. Beyond the standard syntax, that rule
# allows a simplified form:
#   lib a b c ;
# so we need to write code to handle that syntax.
rule lib ( names + : sources * : requirements * : default-build *
: usage-requirements * )
{
local result ;
local project = [ project.current ] ;
# This is a circular module dependency, so it must be imported here
import targets ;
if $(names[2])
{
if <name> in $(requirements:G)
{
errors.user-error "When several names are given to the 'lib' rule" :
"it's not allowed to specify the <name> feature. " ;
}
if $(sources)
{
errors.user-error "When several names are given to the 'lib' rule" :
"it's not allowed to specify sources. " ;
}
}
for local name in $(names)
{
local r = $(requirements) ;
# Support " lib a ; " and " lib a b c ; " syntaxes.
if ! $(sources) && ! <name> in $(requirements:G)
&& ! <file> in $(requirements:G)
{
r += <name>$(name) ;
}
result += [ targets.main-target-alternative
[ new typed-target $(name) : $(project) : LIB
: [ targets.main-target-sources $(sources) : $(name) ]
: [ targets.main-target-requirements $(r) : $(project) ]
: [ targets.main-target-default-build $(default-build) : $(project) ]
: [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
] ] ;
}
return $(result) ;
}
IMPORT $(__name__) : lib : : lib ;
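# The supported forms, for illustration only (all names here are hypothetical):
#
#   lib z : : <name>z <search>/usr/local/lib ;  # searched system library
#   lib compress : compress.cpp ;               # library built from sources
#   lib gui db aux ;                            # several searched libraries at once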
class searched-lib-generator : generator
{
import property-set ;
rule __init__ ( )
{
# The requirements cause the generator to be tried *only* when we're building
# a lib target and there's a 'search' feature. This seems ugly --- all we want
# is to make sure searched-lib-generator is not invoked deep in the
# transformation search.
generator.__init__ searched-lib-generator : : SEARCHED_LIB ;
}
rule run ( project name ? : property-set : sources * )
{
if $(name)
{
# If the name is empty, it means we're not called from the top level.
# In this case, we just fail immediately, because searched-lib-generator
# cannot be used to produce intermediate targets.
local properties = [ $(property-set).raw ] ;
local shared ;
if <link>shared in $(properties)
{
shared = true ;
}
local search = [ feature.get-values <search> : $(properties) ] ;
local a = [ new null-action $(property-set) ] ;
local t = [ new searched-lib-target $(name) : $(project) : $(shared)
: [ feature.get-values <name> : $(properties) ]
: $(search)
: $(a)
] ;
# We return sources for a simple reason. If there's
# lib png : z : <name>png ;
# the 'z' target should be returned, so that apps linking to
# 'png' will link to 'z', too.
return [ property-set.create <xdll-path>$(search) ]
[ virtual-target.register $(t) ] $(sources) ;
}
}
}
generators.register [ new searched-lib-generator ] ;
class prebuilt-lib-generator : generator
{
rule __init__ ( * : * )
{
generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
}
rule run ( project name ? : property-set : sources * )
{
local f = [ $(property-set).get <file> ] ;
return $(f) $(sources) ;
}
}
generators.register
[ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
generators.override builtin.prebuilt : builtin.lib-generator ;
class compile-action : action
{
import sequence ;
rule __init__ ( targets * : sources * : action-name : properties * )
{
action.__init__ $(targets) : $(sources) : $(action-name) : $(properties) ;
}
# For all virtual targets in the same dependency graph as self,
# i.e. those which belong to the same main target, add their directories
# to the include path.
rule adjust-properties ( property-set )
{
local s = [ $(self.targets[1]).creating-subvariant ] ;
return [ $(property-set).add-raw
[ $(s).implicit-includes "include" : H ] ] ;
}
}
# Declare a special compiler generator.
# The only thing it does is change the type used to represent
# 'action' in the constructed dependency graph to 'compile-action'.
# That class in turn adds additional include paths to handle the case
# when a source file includes headers which are themselves generated.
class C-compiling-generator : generator
{
rule __init__ ( id : source-types + : target-types + :
requirements * : optional-properties * )
{
generator.__init__ $(id) : $(source-types) : $(target-types) :
$(requirements) : $(optional-properties) ;
}
rule action-class ( )
{
return compile-action ;
}
}
rule register-c-compiler ( id : source-types + : target-types + :
requirements * : optional-properties * )
{
local g = [ new C-compiling-generator $(id) : $(source-types)
: $(target-types) : $(requirements) : $(optional-properties) ] ;
generators.register $(g) ;
}
# FIXME: this is ugly, should find a better way (we'd want client code to
# register all generators as "generator.some-rule", not with "some-module.some-rule".)
IMPORT $(__name__) : register-c-compiler : : generators.register-c-compiler ;
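# For illustration only: a toolset module would typically register its
# compiler like this (the toolset name and action name are hypothetical):
#
#   generators.register-c-compiler my-toolset.compile.c++ : CPP : OBJ : <toolset>my-toolset ;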
# The generator class for handling EXE and SHARED_LIB creation.
class linking-generator : generator
{
import property-set ;
import type ;
import path ;
import project ;
rule __init__ ( id
composing ? : # Specifies whether the generator is composing. The generator will
# be composing if a non-empty string is passed, or the parameter
# is not given. To make the generator non-composing, pass an
# empty string ("").
source-types + : target-types + :
requirements * )
{
composing ?= true ;
generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
$(requirements) ;
}
rule run ( project name ? : property-set : sources + )
{
sources += [ $(property-set).get <library> ] ;
# Add <library-path> properties for all searched libraries
local extra ;
for local s in $(sources)
{
if [ $(s).type ] = SEARCHED_LIB
{
local search = [ $(s).search ] ;
extra += <library-path>$(search) ;
}
}
# It's possible that the sources include shared libraries that
# did not come from 'lib' targets, for example .so files
# specified directly as sources.
# In this case we have to:
# - add extra dll-path properties
# - propagate extra xdll-path properties so that applications
# linking to us will get the xdll-path to those libraries.
local extra-xdll-paths ;
for local s in $(sources)
{
if [ type.is-derived [ $(s).type ] SHARED_LIB ] && ! [ $(s).action ]
{
# Unfortunately, we don't have a good way to find the path
# to a file, so use this nasty approach.
local p = [ $(s).project ] ;
local location = [ path.root [ $(s).name ]
[ $(p).get source-location ] ] ;
extra-xdll-paths += [ path.parent $(location) ] ;
}
}
# Hardcode dll paths only when linking executables.
# Pros: no need to relink libraries when installing.
# Cons: "standalone" libraries (plugins, python extensions)
# can't hardcode paths to dependent libraries.
if [ $(property-set).get <hardcode-dll-paths> ] = true
&& [ type.is-derived $(self.target-types[1]) EXE ]
{
local xdll-path = [ $(property-set).get <xdll-path> ] ;
extra += <dll-path>$(xdll-path) <dll-path>$(extra-xdll-paths) ;
}
if $(extra)
{
property-set = [ $(property-set).add-raw $(extra) ] ;
}
local result = [ generator.run $(project) $(name) : $(property-set)
: $(sources) ] ;
local ur ;
if $(result)
{
ur = [ extra-usage-requirements $(result) : $(property-set) ] ;
ur = [ $(ur).add
[ property-set.create <xdll-path>$(extra-xdll-paths) ] ] ;
}
return $(ur) $(result) ;
}
rule extra-usage-requirements ( created-targets * : property-set )
{
local result = [ property-set.empty ] ;
local extra ;
# Add appropriate <xdll-path> usage requirements.
local raw = [ $(property-set).raw ] ;
if <link>shared in $(raw)
{
local paths ;
local pwd = [ path.pwd ] ;
for local t in $(created-targets)
{
if [ type.is-derived [ $(t).type ] SHARED_LIB ]
{
paths += [ path.root [ path.make [ $(t).path ] ] $(pwd) ] ;
}
}
extra += $(paths:G=<xdll-path>) ;
}
# We need to pass the <xdll-path> features that we've got from the sources,
# because if a shared library is built, an exe which uses it must know the
# paths to the other shared libraries this one depends on, to be able to
# find them all at runtime.
# Just pass all the features in the property-set; it's theoretically possible
# that we'll propagate <xdll-path> features explicitly specified by
# the user, but then the user is to blame for using an internal feature.
local values = [ $(property-set).get <xdll-path> ] ;
extra += $(values:G=<xdll-path>) ;
if $(extra)
{
result = [ property-set.create $(extra) ] ;
}
return $(result) ;
}
rule generated-targets ( sources + : property-set : project name ? )
{
local sources2 ; # sources to pass to inherited rule
local properties2 ; # properties to pass to inherited rule
local libraries ; # sources which are libraries
# Searched libraries are not passed as arguments to the linker,
# but via some option. So, we pass them to the action
# via a property.
properties2 = [ $(property-set).raw ] ;
local fsa ;
local fst ;
for local s in $(sources)
{
if [ type.is-derived [ $(s).type ] SEARCHED_LIB ]
{
local name = [ $(s).real-name ] ;
if [ $(s).shared ]
{
fsa += $(name) ;
}
else
{
fst += $(name) ;
}
}
else
{
sources2 += $(s) ;
}
}
properties2 += <find-shared-library>$(fsa:J=&&)
<find-static-library>$(fst:J=&&) ;
local spawn = [ generator.generated-targets $(sources2)
: [ property-set.create $(properties2) ] : $(project) $(name) ] ;
return $(spawn) ;
}
}
rule register-linker ( id composing ? : source-types + : target-types + :
requirements * )
{
local g = [ new linking-generator $(id) $(composing) : $(source-types)
: $(target-types) : $(requirements) ] ;
generators.register $(g) ;
}
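# For illustration only: a toolset module would typically register its
# linker like this (the toolset name and action name are hypothetical):
#
#   generators.register-linker my-toolset.link : LIB OBJ : EXE : <toolset>my-toolset ;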
# The generator class for handling STATIC_LIB creation.
class archive-generator : generator
{
import property-set ;
rule __init__ ( id composing ? : source-types + : target-types + :
requirements * )
{
composing ?= true ;
generator.__init__ $(id) $(composing) : $(source-types) : $(target-types) :
$(requirements) ;
}
rule run ( project name ? : property-set : sources + )
{
sources += [ $(property-set).get <library> ] ;
local result = [ generator.run $(project) $(name) : $(property-set)
: $(sources) ] ;
# For static linking, if we get a library in sources, we can't
# directly link to it. So, we need to cause our dependents
# to link to that library. There are two approaches:
# - adding the library to the list of returned targets.
# - using the <library> usage requirements.
# The problem with the first is:
#
# lib a1 : : <file>liba1.a ;
# lib a2 : a2.cpp a1 : <link>static ;
# install dist : a2 ;
#
# here we'll try to install 'a1', even though it's not necessary in
# the general case.
# With the second approach, even indirect dependents will link to
# the library, but that should not cause any harm.
# So, return all LIB sources together with the created targets,
# so that dependents link to them.
local usage-requirements ;
if [ $(property-set).get <link> ] = static
{
for local t in $(sources)
{
if [ type.is-derived [ $(t).type ] LIB ]
{
usage-requirements += <library>$(t) ;
}
}
}
usage-requirements = [ property-set.create $(usage-requirements) ] ;
return $(usage-requirements) $(result) ;
}
}
rule register-archiver ( id composing ? : source-types + : target-types + :
requirements * )
{
local g = [ new archive-generator $(id) $(composing) : $(source-types)
: $(target-types) : $(requirements) ] ;
generators.register $(g) ;
}
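# For illustration only: a toolset module would typically register its
# archiver like this (the toolset name and action name are hypothetical):
#
#   generators.register-archiver my-toolset.archive : OBJ : STATIC_LIB : <toolset>my-toolset ;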
# A generator that accepts everything and produces nothing.
# Useful as a general fallback for toolset-specific actions, like
# PCH generation.
class dummy-generator : generator
{
import property-set ;
rule run ( project name ? : property-set : sources + )
{
return [ property-set.empty ] ;
}
}
IMPORT $(__name__) : register-linker register-archiver
: : generators.register-linker generators.register-archiver ;