mirror of
https://github.com/boostorg/build.git
synced 2026-02-13 00:12:11 +00:00
Add experimental Python port
[SVN r55201]
This commit is contained in:
0
src/build/__init__.py
Normal file
0
src/build/__init__.py
Normal file
62
src/build/alias.py
Executable file
62
src/build/alias.py
Executable file
@@ -0,0 +1,62 @@
|
||||
# Copyright 2003, 2004, 2006 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# Status: ported (danielw)
|
||||
# Base revision: 40480
|
||||
|
||||
# This module defines the 'alias' rule and associated class.
|
||||
#
|
||||
# Alias is just a main target which returns its source targets without any
|
||||
# processing. For example::
|
||||
#
|
||||
# alias bin : hello test_hello ;
|
||||
# alias lib : helpers xml_parser ;
|
||||
#
|
||||
# Another important use of 'alias' is to conveniently group source files::
|
||||
#
|
||||
# alias platform-src : win.cpp : <os>NT ;
|
||||
# alias platform-src : linux.cpp : <os>LINUX ;
|
||||
# exe main : main.cpp platform-src ;
|
||||
#
|
||||
# Lastly, it's possible to create local alias for some target, with different
|
||||
# properties::
|
||||
#
|
||||
# alias big_lib : : @/external_project/big_lib/<link>static ;
|
||||
#
|
||||
|
||||
import targets
|
||||
import property_set
|
||||
from b2.manager import get_manager
|
||||
|
||||
class AliasTarget(targets.BasicTarget):
    """Main target that forwards its sources unchanged (the 'alias' rule)."""

    def __init__(self, *args):
        targets.BasicTarget.__init__(self, *args)

    def construct(self, name, source_targets, properties):
        # An alias performs no build step of its own: hand the source
        # targets straight back with an empty property set.
        return [property_set.empty(), source_targets]

    def compute_usage_requirements(self, subvariant):
        """Merge the sources' usage requirements into our own."""
        own = targets.BasicTarget.compute_usage_requirements(self, subvariant)
        # Without forwarding the sources' usage requirements, 'alias'
        # would not behave as a fully transparent stand-in.
        return own.add(subvariant.sources_usage_requirements())
def alias(name, sources, requirements=None, default_build=None, usage_requirements=None):
    """Declare one alternative of the 'alias' main target 'name'.

    The sources are built and returned unaltered; see AliasTarget.
    """
    manager = get_manager()
    project = manager.projects().current()
    registry = manager.targets()

    if default_build:
        default_build = default_build[0]

    target = AliasTarget(
        name[0], project,
        registry.main_target_sources(sources, name),
        registry.main_target_requirements(requirements or [], project),
        registry.main_target_default_build(default_build, project),
        registry.main_target_usage_requirements(usage_requirements or [], project))
    registry.main_target_alternative(target)
|
||||
# Declares the 'alias' target. It will build sources, and return them unaltered.
# Registered with the project module so Jamfiles can invoke it as a rule.
get_manager().projects().add_rule("alias", alias)
||||
|
||||
211
src/build/build_request.py
Normal file
211
src/build/build_request.py
Normal file
@@ -0,0 +1,211 @@
|
||||
# Status: being ported by Vladimir Prus
|
||||
# TODO: need to re-compare with mainline of .jam
|
||||
# Base revision: 40480
|
||||
#
|
||||
# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
import feature
|
||||
from b2.util import set
|
||||
from b2.util.utility import *
|
||||
|
||||
def expand_no_defaults (property_sets):
    """ Expand the given build request by combining all property_sets which don't
        specify conflicting non-free features.
    """
    # Make every feature and subfeature explicit first.
    expanded = [__apply_to_property_set(feature.expand_subfeatures, ps)
                for ps in property_sets]

    # Then form the cross-product of the expanded property sets.
    return __x_product(expanded)
def __apply_to_property_set (f, property_set):
    """ Transform property_set by applying f to each component property.
    """
    # Split into component properties, map, and re-join with '/'.
    return '/'.join(f(feature.split(property_set)))
def __x_product (property_sets):
    """ Return the cross-product of all elements of property_sets, less any
        that would contain conflicting values for single-valued features.
    """
    # Fresh accumulators for the recursive helper.
    seen = []
    used = []
    return __x_product_aux(property_sets, seen, used, [])
def __x_product_aux (property_sets, x_product_seen, x_product_used, feature_space):
    """ Implementation of __x_product.

        NOTE: 'x_product_seen' is an out-parameter -- features encountered
        at this and lower recursion levels are appended to it so that the
        caller can decide whether to recurse again without them.
    """
    result = []

    if property_sets:
        p = feature.split (property_sets [0])
    else:
        p = []

    # Non-free feature names (grists) present in the head property set.
    f = set.difference (get_grist (p), feature.free_features ())

    seen = []
    # No conflict with things used at a higher level?
    if not set.intersection (f, x_product_used):
        # don't mix in any conflicting features
        local_x_product_used = x_product_used + f
        local_x_product_seen = []

        if len (property_sets) > 1:
            rest = __x_product_aux (property_sets [1:], local_x_product_seen, local_x_product_used, feature_space)
            result = [ property_sets [0] + '/' + x for x in rest]

        # The head alone is a valid combination when nothing below combined.
        if not result and property_sets:
            result = [property_sets [0]]

        # If we didn't encounter a conflicting feature lower down,
        # don't recurse again.
        if not set.intersection (f, local_x_product_seen):
            property_sets = []

        seen = local_x_product_seen

    if len (property_sets) > 1:
        result.extend (__x_product_aux (property_sets [1:], x_product_seen, x_product_used, feature_space))
    x_product_seen += f + seen

    # Note that we've seen these features so that higher levels will
    # recurse again without them set.

    return result
def looks_like_implicit_value(v):
    """Returns true if 'v' is either implicit value, or
    the part before the first '-' symbol is implicit value."""
    if feature.is_implicit_value(v):
        return 1
    # "gcc-3.0" style: check just the part before the first '-'.
    head = v.split("-")[0]
    if feature.is_implicit_value(head):
        return 1
    return 0
def from_command_line(command_line):
    """Takes the command line tokens (such as taken from ARGV rule)
    and constructs build request from it. Returns a list of two
    lists. First is the set of targets specified in the command line,
    and second is the set of requested build properties."""

    targets = []
    properties = []

    for token in command_line:
        # Skip options; only bare words are targets or property specs.
        if token[0] == "-":
            continue
        # Build request spec either has "=" in it, or completely
        # consists of implicit feature values.
        if token.find("=") != -1 or looks_like_implicit_value(token.split("/")[0]):
            properties.extend(convert_command_line_element(token))
        else:
            targets.append(token)

    return [targets, properties]
# Converts one element of command line build request specification into
# internal form.
def convert_command_line_element(e):
    """Convert one command-line build-request element into internal form.

    Examples::

        'debug'                           -> ['debug']
        'runtime-link=static'             -> ['<runtime-link>static']
        'gcc/runtime-link=dynamic,static' -> ['gcc/<runtime-link>dynamic',
                                              'gcc/<runtime-link>static']
    """
    result = None
    parts = e.split("/")
    for p in parts:
        m = p.split("=")
        if len(m) > 1:
            # Explicit 'name=value[,value...]' specification.
            # NOTE: the local must not be called 'feature' -- that would
            # shadow the imported 'feature' module (original bug).
            feature_name = m[0]
            values = m[1].split(",")
            lresult = [("<%s>%s" % (feature_name, v)) for v in values]
        else:
            # Implicit feature values, possibly comma-separated.
            lresult = p.split(",")

        if p.find('-') == -1:
            # FIXME: first port property.validate
            # property.validate cannot handle subfeatures,
            # so we avoid the check here.
            #for p in lresult:
            #    property.validate(p)
            pass

        if not result:
            result = lresult
        else:
            # Cross-product with what has been accumulated so far.
            result = [e1 + "/" + e2 for e1 in result for e2 in lresult]

    return result
###
|
||||
### rule __test__ ( )
|
||||
### {
|
||||
### import assert feature ;
|
||||
###
|
||||
### feature.prepare-test build-request-test-temp ;
|
||||
###
|
||||
### import build-request ;
|
||||
### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
|
||||
### import errors : try catch ;
|
||||
### import feature : feature subfeature ;
|
||||
###
|
||||
### feature toolset : gcc msvc borland : implicit ;
|
||||
### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
|
||||
### 3.0 3.0.1 3.0.2 : optional ;
|
||||
###
|
||||
### feature variant : debug release : implicit composite ;
|
||||
### feature inlining : on off ;
|
||||
### feature "include" : : free ;
|
||||
###
|
||||
### feature stdlib : native stlport : implicit ;
|
||||
###
|
||||
### feature runtime-link : dynamic static : symmetric ;
|
||||
###
|
||||
###
|
||||
### local r ;
|
||||
###
|
||||
### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
|
||||
### assert.equal [ $(r).get-at 1 ] : ;
|
||||
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
|
||||
###
|
||||
### try ;
|
||||
### {
|
||||
###
|
||||
### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
|
||||
### }
|
||||
### catch \"static\" is not a value of an implicit feature ;
|
||||
###
|
||||
###
|
||||
### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
|
||||
### assert.equal [ $(r).get-at 1 ] : target ;
|
||||
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
|
||||
###
|
||||
### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
|
||||
### assert.equal [ $(r).get-at 1 ] : ;
|
||||
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
|
||||
###
|
||||
### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
|
||||
### assert.equal [ $(r).get-at 1 ] : ;
|
||||
### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
|
||||
### gcc/<runtime-link>static ;
|
||||
###
|
||||
### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
|
||||
### assert.equal [ $(r).get-at 1 ] : ;
|
||||
### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
|
||||
### borland/<runtime-link>static ;
|
||||
###
|
||||
### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
|
||||
### assert.equal [ $(r).get-at 1 ] : ;
|
||||
### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
|
||||
###
|
||||
### feature.finish-test build-request-test-temp ;
|
||||
### }
|
||||
###
|
||||
###
|
||||
159
src/build/engine.py
Normal file
159
src/build/engine.py
Normal file
@@ -0,0 +1,159 @@
|
||||
# Copyright Pedro Ferreira 2005.
|
||||
# Copyright Vladimir Prus 2007.
|
||||
# Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
bjam_interface = __import__('bjam')
|
||||
|
||||
import operator
|
||||
|
||||
class BjamAction:
    """Class representing bjam action defined from Python."""

    def __init__(self, action_name, function):
        # Name under which the action was registered on the bjam side.
        self.action_name = action_name
        # Optional Python callable invoked before binding the action;
        # it may set additional target variables.  May be None.
        self.function = function

    def __call__(self, targets, sources, property_set):
        if self.function:
            self.function(targets, sources, property_set)
        # Bjam actions defined from Python have only the command
        # to execute, and no associated jam procedural code. So
        # passing 'property_set' to it is not necessary.
        bjam_interface.call("set-update-action", self.action_name,
                            targets, sources, [])
class BjamNativeAction:
    """Class representing bjam action fully defined by Jam code."""

    def __init__(self, action_name):
        self.action_name = action_name

    def __call__(self, targets, sources, property_set):
        # Native jam actions receive the raw properties when present.
        raw = property_set.raw() if property_set else []
        bjam_interface.call("set-update-action", self.action_name,
                            targets, sources, raw)
# Bit flags understood by bjam for action definitions; they are or-ed
# together when an action is registered (see Engine.register_action).
action_modifiers = {"updated": 0x01,
                    "together": 0x02,
                    "ignore": 0x04,
                    "quietly": 0x08,
                    "piecemeal": 0x10,
                    "existing": 0x20}
class Engine:
    """ The abstract interface to a build engine.

    For now, the naming of targets, and special handling of some
    target variables like SEARCH and LOCATE make this class coupled
    to bjam engine.
    """
    def __init__ (self):
        # Maps action name -> BjamAction/BjamNativeAction callable.
        self.actions = {}

    def add_dependency (self, targets, sources):
        """Adds a dependency from 'targets' to 'sources'

        Both 'targets' and 'sources' can be either list
        of target names, or a single target name.
        """
        if isinstance (targets, str):
            targets = [targets]
        if isinstance (sources, str):
            sources = [sources]

        for target in targets:
            for source in sources:
                self.do_add_dependency (target, source)

    def set_target_variable (self, targets, variable, value, append=0):
        """ Sets a target variable.

        The 'variable' will be available to bjam when it decides
        where to generate targets, and will also be available to
        updating rule for that 'target'.
        """
        if isinstance (targets, str):
            targets = [targets]

        for target in targets:
            self.do_set_target_variable (target, variable, value, append)

    def set_update_action (self, action_name, targets, sources, properties):
        """ Binds a target to the corresponding update action.
        If target needs to be updated, the action registered
        with action_name will be used.
        The 'action_name' must be previously registered by
        either 'register_action' or 'register_bjam_action'
        method.
        """
        if isinstance (targets, str):
            targets = [targets]
        self.do_set_update_action (action_name, targets, sources, properties)

    def register_action (self, action_name, command, bound_list = None, flags = None,
                         function = None):
        """Creates a new build engine action.

        Creates on bjam side an action named 'action_name', with
        'command' as the command to be executed, 'bound_variables'
        naming the list of variables bound when the command is executed
        and specified flag.
        If 'function' is not None, it should be a callable taking three
        parameters:
            - targets
            - sources
            - instance of the property_set class
        This function will be called by set_update_action, and can
        set additional target variables.

        Raises Exception if 'action_name' is already registered.
        """
        # 'None' defaults avoid the shared-mutable-default-argument trap.
        if bound_list is None:
            bound_list = []
        if flags is None:
            flags = []

        # String exceptions ('raise "..."') are invalid in modern Python;
        # raise a real exception object instead.
        if action_name in self.actions:
            raise Exception ("Bjam action %s is already defined" % action_name)

        assert(isinstance(flags, list))

        # Fold the symbolic flag names into the single bitmask bjam expects.
        bjam_flags = 0
        for flag in flags:
            bjam_flags |= action_modifiers[flag]

        bjam_interface.define_action(action_name, command, bound_list, bjam_flags)

        self.actions[action_name] = BjamAction(action_name, function)

    def register_bjam_action (self, action_name):
        """Informs self that 'action_name' is declared in bjam.

        From this point, 'action_name' is a valid argument to the
        set_update_action method. The action_name should be callable
        in the global module of bjam.
        """
        # We allow duplicate calls to this rule for the same
        # action name. This way, jamfile rules that take action names
        # can just register them without specially checking if
        # action is already registered.
        if action_name not in self.actions:
            self.actions[action_name] = BjamNativeAction(action_name)

    # Overridables

    def do_set_update_action (self, action_name, targets, sources, property_set):
        # Dispatch to the registered action; fail loudly if unknown.
        action = self.actions.get(action_name)
        if not action:
            raise Exception ("No action %s was registered" % action_name)
        action(targets, sources, property_set)

    def do_set_target_variable (self, target, variable, value, append):
        if append:
            bjam_interface.call("set-target-variable", target, variable, value, "true")
        else:
            bjam_interface.call("set-target-variable", target, variable, value)

    def do_add_dependency (self, target, source):
        bjam_interface.call("DEPENDS", target, source)
||||
|
||||
122
src/build/errors.py
Normal file
122
src/build/errors.py
Normal file
@@ -0,0 +1,122 @@
|
||||
# Status: being written afresh by Vladimir Prus
|
||||
|
||||
# Copyright 2007 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# This file is supposed to implement error reporting for Boost.Build.
|
||||
# Experience with jam version has shown that printing full backtrace
|
||||
# on each error is buffling. Further, for errors printed after parsing --
|
||||
# during target building, the stacktrace does not even mention what
|
||||
# target is being built.
|
||||
|
||||
# This module implements explicit contexts -- where other code can
|
||||
# communicate which projects/targets are being built, and error
|
||||
# messages will show those contexts. For programming errors,
|
||||
# Python assertions are to be used.
|
||||
|
||||
import bjam
|
||||
import traceback
|
||||
import sys
|
||||
|
||||
def format(message, prefix=""):
    """Return 'message' with 'prefix' prepended to every line."""
    out = []
    for line in message.split("\n"):
        out.append(prefix + line)
    return "\n".join(out)
class Context:
    """One entry of the user-level error-context stack.

    'message' describes what is being processed; 'nested' is an optional
    list of further context objects (e.g. where a target was declared).
    """

    def __init__(self, message, nested=None):
        self.message_ = message
        self.nested_ = nested

    def report(self, indent=""):
        # Print this context, then any nested contexts one indent deeper.
        print indent + " -", self.message_
        if self.nested_:
            print indent + "    declared at:"
            for n in self.nested_:
                n.report(indent + "    ")
||||
|
||||
class JamfileContext:
    """Error context captured from the bjam-side backtrace."""

    def __init__(self):
        # Frames from bjam's backtrace; each indexable as (file, line, ...).
        raw = bjam.backtrace()
        self.raw_ = raw

    def report(self, indent=""):
        for r in self.raw_:
            print indent + " - %s:%s" % (r[0], r[1])
||||
|
||||
class ExceptionWithUserContext(Exception):
    """A user-level error plus the context stack active when it was raised,
    and optionally the original exception/traceback that triggered it."""

    def __init__(self, message, context,
                 original_exception=None, original_tb=None, stack=None):
        Exception.__init__(self, message)
        self.context_ = context
        self.original_exception_ = original_exception
        self.original_tb_ = original_tb
        self.stack_ = stack

    def report(self):
        # Print the error, the context innermost-first, and -- only when
        # the user passed --stacktrace -- a Python-level backtrace.
        print "error:", self.message
        if self.original_exception_:
            print format(self.original_exception_.message, "    ")
        print
        print "    error context (most recent first):"
        for c in self.context_[::-1]:
            c.report()
        print
        if "--stacktrace" in bjam.variable("ARGV"):
            if self.original_tb_:
                traceback.print_tb(self.original_tb_)
            elif self.stack_:
                for l in traceback.format_list(self.stack_):
                    print l,
        else:
            print "    use the '--stacktrace' option to get Python stacktrace"
        print
||||
|
||||
def user_error_checkpoint(callable):
    """Decorator for methods of classes exposing .manager().

    Runs the wrapped method; an ExceptionWithUserContext propagates
    untouched, while any other exception is converted into one carrying
    the current user context.  Always pops one user context on exit.
    """
    def wrapper(self, *args):
        errors = self.manager().errors()
        try:
            return callable(self, *args)
        except ExceptionWithUserContext, e:
            # Already carries user context -- re-raise as-is.
            raise
        except Exception, e:
            # Wrap the stray exception with the active context.
            errors.handle_stray_exception(e)
        finally:
            errors.pop_user_context()

    return wrapper
||||
|
||||
class Errors:
    """Stack of user-level error contexts, plus helpers for raising
    errors that carry that context."""

    def __init__(self):
        self.contexts_ = []

    def push_user_context(self, message, nested=None):
        self.contexts_.append(Context(message, nested))

    def pop_user_context(self):
        self.contexts_.pop()

    def push_jamfile_context(self):
        self.contexts_.append(JamfileContext())

    def pop_jamfile_context(self):
        self.contexts_.pop()

    def capture_user_context(self):
        # Shallow copy: callers keep a snapshot of the current stack.
        return list(self.contexts_)

    def handle_stray_exception(self, e):
        raise ExceptionWithUserContext("unexpected exception", self.capture_user_context(),
                                       e, sys.exc_info()[2])

    def __call__(self, message):
        raise ExceptionWithUserContext(message, self.capture_user_context(),
                                       stack=traceback.extract_stack())
||||
|
||||
|
||||
|
||||
|
||||
891
src/build/feature.py
Normal file
891
src/build/feature.py
Normal file
@@ -0,0 +1,891 @@
|
||||
# Status: mostly ported.
|
||||
# TODO: carry over tests.
|
||||
# Base revision: 40480
|
||||
#
|
||||
# Copyright 2001, 2002, 2003 Dave Abrahams
|
||||
# Copyright 2002, 2006 Rene Rivera
|
||||
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# TODO: stop using grists to identify the name of features?
|
||||
# create a class for Features and Properties?
|
||||
# represent conditions using object trees, composite pattern?
|
||||
|
||||
import re
|
||||
|
||||
from b2.util import set, utility
|
||||
from b2.util.utility import add_grist, get_grist, ungrist, replace_grist, to_seq
|
||||
from b2.exceptions import *
|
||||
|
||||
# Pre-compiled helper patterns.
# Splits "<feature:subfeature>" into its two components.
__re_split_subfeatures = re.compile ('<(.*):(.*)>')
# Matches strings containing no ':' at all.
# NOTE(review): the name says 'hyphen' but the pattern excludes ':' -- confirm intent.
__re_no_hyphen = re.compile ('^([^:]+)$')
# Matches either a forward or a backward slash.
__re_slash_or_backslash = re.compile (r'[\\/]')
||||
|
||||
def reset ():
    """ Clear the module state. This is mainly for testing purposes.

        Reinitializes every module-level registry used by this module;
        called once at import time (see the 'reset ()' call below).
    """
    global __all_attributes, __all_features, __implicit_features, __composite_properties
    global __features_with_attributes, __subfeature_value_to_name, __all_top_features, __free_features
    global __all_subfeatures

    # The list with all attribute names.
    __all_attributes = [ 'implicit',
                         'executed',
                         'composite',
                         'optional',
                         'symmetric',
                         'free',
                         'incidental',
                         'path',
                         'dependency',
                         'propagated',
                         'link-incompatible',
                         'subfeature',
                         'order-sensitive'
                       ]

    # A map containing all features. The key is the gristed feature name. The value is a map with:
    #    'values': [],
    #    'attributes': [],
    #    'subfeatures': [],
    #    'default': None
    __all_features = {}

    # All non-subfeatures.
    __all_top_features = []

    # Maps values to the corresponding implicit feature
    __implicit_features = {}

    # A map containing all composite properties. The key is the name of the property. The value is a map with:
    #    'components': []
    __composite_properties = {}

    # Maps each attribute name to the list of features carrying it.
    __features_with_attributes = {}
    for attribute in __all_attributes:
        __features_with_attributes [attribute] = []

    # Maps a value to the corresponding subfeature name.
    __subfeature_value_to_name = {}

    # All free features
    __free_features = []

    __all_subfeatures = []
||||
|
||||
# Initialize the module-level state on import.
reset ()
||||
|
||||
def enumerate ():
    """ Returns an iterator to the features map.

        NOTE: shadows the 'enumerate' builtin within this module.
    """
    return __all_features.iteritems ()
||||
|
||||
# FIXME: prepare-test/finish-test?
|
||||
|
||||
def feature (name, values, attributes = []):
    """ Declares a new feature with the given name, values, and attributes.
        name: the feature name
        values: a sequence of the allowable values - may be extended later with feature.extend
        attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...)
    """
    name = add_grist (name)

    __validate_feature_attributes (name, attributes)

    # Copy the attribute list: otherwise every feature declared without
    # explicit attributes would store the very same (mutable) default
    # list object in its record.
    attributes = list (attributes)

    feature = {
        'values': [],
        'attributes': attributes,
        'subfeatures': [],
        'default': None
        }
    __all_features [name] = feature

    for attribute in attributes:
        __features_with_attributes [attribute].append (name)

    if 'subfeature' in attributes:
        __all_subfeatures.append(name)
    else:
        __all_top_features.append(name)

    extend (name, values)

    # Free features are additionally tracked in their own registry.
    if 'free' in attributes:
        __free_features.append (name)
||||
|
||||
def set_default (feature, value):
    """ Sets the default value of the given feature, overriding any previous default.
        feature: the name of the feature
        value: the default value to assign
    """
    # Accept single-element lists for both arguments.
    if isinstance(feature, list):
        feature = feature[0]

    feature = add_grist (feature)
    f = __all_features [feature]

    if isinstance(value, list):
        value = value[0]

    values = f['values']
    if value not in values:
        raise InvalidValue ("The specified default value, '%s' is invalid.\n" % value + "allowed values are: %s" % values)

    f ['default'] = value
||||
|
||||
def defaults (features):
    """ Returns the default property values for the given features.
    """
    result = []
    for f in features:
        attrs = __all_features [f]['attributes']
        # Free and optional features have no forced default.
        if 'free' in attrs or 'optional' in attrs:
            continue
        default = __all_features [f]['default']
        if default:
            result.append (replace_grist (default, f))

    return result
||||
|
||||
def valid (names):
    """ Returns true iff all elements of names are valid features.

        NOTE(review): this function is defined twice in this module with an
        identical body; the later definition wins at import time.
    """
    def valid_one (name): return __all_features.has_key (name)

    if isinstance (names, str):
        return valid_one (names)
    else:
        return [ valid_one (name) for name in names ]
||||
|
||||
def attributes (feature):
    """ Returns the attributes of the given feature.
    """
    record = __all_features [feature]
    return record ['attributes']
||||
|
||||
def values (feature):
    """ Return the values of the given feature.
    """
    # Raises if 'feature' is unknown.
    validate_feature (feature)
    record = __all_features [feature]
    return record ['values']
||||
|
||||
def is_implicit_value (value_string):
    """ Returns true iff 'value_string' is a value_string
        of an implicit feature.
    """
    # "gcc-3.0" style strings: primary value followed by subvalues.
    v = value_string.split('-')

    # 'in' replaces the deprecated dict.has_key.
    if v[0] not in __implicit_features:
        return False

    feature = __implicit_features[v[0]]

    # Every remaining component must be a subfeature value implied for
    # the primary value.
    for subvalue in v[1:]:
        if not __find_implied_subfeature(feature, subvalue, v[0]):
            return False

    return True
||||
|
||||
def implied_feature (implicit_value):
    """ Returns the implicit feature associated with the given implicit value.

        Raises InvalidValue when the value is not implicit.
    """
    components = implicit_value.split('-')

    # 'in' replaces the deprecated dict.has_key.
    if components[0] not in __implicit_features:
        raise InvalidValue ("'%s' is not a value of an implicit feature" % implicit_value)

    return __implicit_features[components[0]]
||||
|
||||
def __find_implied_subfeature (feature, subvalue, value_string):
    """ Returns the subfeature name implied by 'subvalue' for 'feature'
        (specific to 'value_string' when registered so), or None when
        there is no such subfeature value.
    """
    feature = add_grist (feature)
    if value_string is None: value_string = ''

    # Walk the nested mapping: feature -> value-string -> subvalue -> name.
    by_value_string = __subfeature_value_to_name.get (feature)
    if by_value_string is None:
        return None
    by_subvalue = by_value_string.get (value_string)
    if by_subvalue is None:
        return None
    return by_subvalue.get (subvalue)
||||
|
||||
# Given a feature and a value of one of its subfeatures, find the name
|
||||
# of the subfeature. If value-string is supplied, looks for implied
|
||||
# subfeatures that are specific to that value of feature
|
||||
# feature # The main feature name
|
||||
# subvalue # The value of one of its subfeatures
|
||||
# value-string # The value of the main feature
|
||||
|
||||
def implied_subfeature (feature, subvalue, value_string):
    """ Returns the name of the subfeature of 'feature' implied by
        'subvalue'; raises InvalidValue when there is none.
    """
    name = __find_implied_subfeature (feature, subvalue, value_string)
    if name:
        return name
    raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))
||||
|
||||
def validate_feature (name):
    """ Checks if all name is a valid feature. Otherwise, raises an exception.
    """
    if not valid (name):
        raise InvalidFeature ("'%s' is not a valid feature name" % name)
||||
|
||||
def valid (names):
    """ Returns true iff all elements of names are valid features.

        NOTE(review): duplicate of the identical definition earlier in this
        module; this one overrides it at import time.
    """
    def valid_one (name): return __all_features.has_key (name)

    if isinstance (names, str):
        return valid_one (names)
    else:
        return [ valid_one (name) for name in names ]
||||
|
||||
def __expand_subfeatures_aux (feature, value, dont_validate = False):
    """ Helper for expand_subfeatures.
        Given a feature and value, or just a value corresponding to an
        implicit feature, returns a property set consisting of all component
        subfeatures and their values. For example:

        expand_subfeatures <toolset>gcc-2.95.2-linux-x86
            -> <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86
        equivalent to:
            expand_subfeatures gcc-2.95.2-linux-x86

        feature:       The name of the feature, or empty if value corresponds to an implicit property
        value:         The value of the feature.
        dont_validate: If True, no validation of value string will be done.
    """
    if not feature:
        feature = implied_feature(value)
    else:
        validate_feature(feature)

    if not dont_validate:
        validate_value_string(feature, value)

    # "value-sub1-sub2" -> primary value plus subfeature values.
    components = value.split ("-")

    # get the top-level feature's value
    value = replace_grist(components[0], '')

    result = [ replace_grist(components[0], feature) ]

    subvalues = components[1:]

    while len(subvalues) > 0:
        subvalue = subvalues [0] # pop the head off of subvalues
        subvalues = subvalues [1:]

        subfeature = __find_implied_subfeature (feature, subvalue, value)

        # If no subfeature was found, reconstitute the value string and use that
        if not subfeature:
            result = '-'.join(components)
            result = replace_grist (result, feature)
            return [result]

        f = ungrist (feature)
        # FIXME: why grist includes '<>'?
        result.append (replace_grist (subvalue, '<' + f + '-' + subfeature + '>'))

    return result
||||
|
||||
def expand_subfeatures (properties, dont_validate = False):
    """
    Make all elements of properties corresponding to implicit features
    explicit, and express all subfeature values as separate properties
    in their own right. For example, the property

       gcc-2.95.2-linux-x86

    might expand to

      <toolset>gcc <toolset-version>2.95.2 <toolset-os>linux <toolset-cpu>x86

    properties:     A sequence with elements of the form
                    <feature>value-string or just value-string in the
                    case of implicit features.
    dont_validate:  If True, no validation of value string will be done.
    """
    result = []
    for p in properties:
        p_grist = get_grist (p)
        if ':' not in p_grist:
            result.extend (__expand_subfeatures_aux (p_grist, replace_grist (p, ''), dont_validate))
        else:
            # Don't expand subfeatures in subfeatures
            result.append (p)

    return result
||||
|
||||
|
||||
|
||||
# rule extend was defined as below:
|
||||
# Can be called three ways:
|
||||
#
|
||||
# 1. extend feature : values *
|
||||
# 2. extend <feature> subfeature : values *
|
||||
# 3. extend <feature>value-string subfeature : values *
|
||||
#
|
||||
# * Form 1 adds the given values to the given feature
|
||||
# * Forms 2 and 3 add subfeature values to the given feature
|
||||
# * Form 3 adds the subfeature values as specific to the given
|
||||
# property value-string.
|
||||
#
|
||||
#rule extend ( feature-or-property subfeature ? : values * )
|
||||
#
|
||||
# Now, the specific rule must be called, depending on the desired operation:
|
||||
# extend_feature
|
||||
# extend_subfeature
|
||||
|
||||
def extend (name, values):
    """Add the given values to the named feature.

    For implicit features each new value is also registered in the
    implicit-feature map, so it can later be recognized without grist.
    The very first value ever given to a feature becomes its default.
    """
    name = add_grist (name)
    __validate_feature (name)
    entry = __all_features [name]

    if 'implicit' in entry ['attributes']:
        for value in values:
            if value in __implicit_features:
                raise BaseException ("'%s' is already associated with the feature '%s'" % (value, __implicit_features [value]))
            __implicit_features[value] = name

    if not entry ['values'] and len (values) > 0:
        # The first value specified for a feature is taken as its default.
        entry ['default'] = values[0]

    entry['values'].extend (values)
|
||||
|
||||
def validate_value_string (feature, value_string):
    """Check that value_string is a valid value-string for the given
    feature; raises InvalidValue otherwise.
    """
    attrs = __all_features [feature]
    # Free features accept anything; a directly-known value needs no work.
    if 'free' in attrs ['attributes'] or value_string in attrs ['values']:
        return

    if attrs['subfeatures']:
        values = value_string.split('-')
    else:
        values = [value_string]

    main_value = values[0]
    # An empty value is allowed for optional features
    if main_value not in attrs['values'] and \
           (main_value or 'optional' not in attrs['attributes']):
        raise InvalidValue ("'%s' is not a known value of feature '%s'\nlegal values: '%s'" % (values [0], feature, attrs ['values']))

    # This validates any subfeature values joined into the value-string.
    for sub in values [1:]:
        implied_subfeature(feature, sub, main_value)
|
||||
|
||||
|
||||
""" Extends the given subfeature with the subvalues. If the optional
|
||||
value-string is provided, the subvalues are only valid for the given
|
||||
value of the feature. Thus, you could say that
|
||||
<target-platform>mingw is specifc to <toolset>gcc-2.95.2 as follows:
|
||||
|
||||
extend-subfeature toolset gcc-2.95.2 : target-platform : mingw ;
|
||||
|
||||
feature: The feature whose subfeature is being extended.
|
||||
|
||||
value-string: If supplied, specifies a specific value of the
|
||||
main feature for which the new subfeature values
|
||||
are valid.
|
||||
|
||||
subfeature: The name of the subfeature.
|
||||
|
||||
subvalues: The additional values of the subfeature being defined.
|
||||
"""
|
||||
def extend_subfeature (feature, value_string, subfeature, subvalues):
|
||||
feature = add_grist (feature)
|
||||
validate_feature (feature)
|
||||
|
||||
if value_string:
|
||||
validate_value_string (feature, value_string)
|
||||
|
||||
subfeature_name = __get_subfeature_name (subfeature, value_string)
|
||||
|
||||
f = ungrist (feature)
|
||||
extend (f + '-' + subfeature_name, subvalues) ;
|
||||
|
||||
__add_to_subfeature_value_to_name_map (feature, value_string, subfeature_name, subvalues)
|
||||
|
||||
def subfeature (feature_name, value_string, subfeature, subvalues, attributes = []):
    """Declare a subfeature.

    feature_name:   root feature that is not itself a subfeature.
    value_string:   optional value-string restricting which values of the
                    root feature this subfeature applies to.
    subfeature:     the name of the subfeature being declared.
    subvalues:      the allowed values of this subfeature.
    attributes:     the attributes of the subfeature.
    """
    feature_name = add_grist (feature_name)
    validate_feature (feature_name)

    # Embed the value-string into the subfeature name when one was given.
    subfeature_name = __get_subfeature_name (subfeature, value_string)

    if subfeature_name in __all_features [feature_name]['subfeatures']:
        message = "'%s' already declared as a subfeature of '%s'" % (subfeature, feature_name)
        message += " specific to '%s'" % value_string
        raise BaseException (message)

    __all_features [feature_name]['subfeatures'].append (subfeature_name)

    # Declare the subfeature as a feature in its own right...
    feature (ungrist (feature_name) + '-' + subfeature_name, subvalues, attributes + ['subfeature'])

    # ...and register its values.
    extend_subfeature (feature_name, value_string, subfeature, subvalues)
|
||||
|
||||
def compose (composite_property, component_properties):
    """Set the components of the given composite property.

    Raises when the property's feature is not composite, when components
    were already set, or when the property would contain itself.
    """
    component_properties = to_seq (component_properties)

    feature = get_grist (composite_property)
    if 'composite' not in attributes (feature):
        raise BaseException ("'%s' is not a composite feature" % feature)

    if composite_property in __composite_properties:
        raise BaseException ('components of "%s" already set: %s' % (composite_property, str (__composite_properties [composite_property]['components'])))

    if composite_property in component_properties:
        raise BaseException ('composite property "%s" cannot have itself as a component' % composite_property)

    __composite_properties [composite_property] = { 'components': component_properties }
|
||||
|
||||
|
||||
def expand_composite (property):
    """Return the property followed by the recursive expansion of its
    composite components, if any.
    """
    expanded = [ property ]
    if property in __composite_properties:
        for component in __composite_properties [property]['components']:
            expanded.extend (expand_composite (component))
    return expanded
|
||||
|
||||
|
||||
def get_values (feature, properties):
    """Return all values of the given feature specified by the given
    property set.
    """
    return [replace_grist (p, '') for p in properties if get_grist (p) == feature]
|
||||
|
||||
def free_features ():
    """Return the list of all features declared with the 'free' attribute."""
    return __free_features
|
||||
|
||||
def expand_composites (properties):
    """ Expand all composite properties in the set so that all components
        are explicitly expressed.

        Raises FeatureConflict when two different values of one non-free
        feature would end up in the result.
    """
    # Features the caller specified directly; their values win over
    # values introduced by composite expansion.
    explicit_features = get_grist (properties)

    result = []

    # now expand composite features
    for p in properties:
        expanded = expand_composite (p)

        for x in expanded:
            if not x in result:
                f = get_grist (x)

                if f in __free_features:
                    # Free features may legitimately occur with many values.
                    result.append (x)
                elif not x in properties: # x is the result of expansion
                    if not f in explicit_features: # not explicitly-specified
                        if f in get_grist (result):
                            # Two composites expanded to different values of f.
                            raise FeatureConflict ("expansions of composite features result in "
                                "conflicting values for '%s'\nvalues: '%s'\none contributing composite property was '%s'" % (f,
                                get_values (f, result) + [replace_grist (x, '')], p))
                        else:
                            result.append (x)
                    # else: f was given explicitly, so the expanded value
                    # is silently dropped in favour of the explicit one.
                elif f in get_grist (result):
                    # x itself was explicitly specified, but another value
                    # of its feature is already present.
                    raise FeatureConflict ("explicitly-specified values of non-free feature '%s' conflict\n"
                        "existing values: '%s'\nvalue from expanding '%s': '%s'" % (f,
                        get_values (f, properties), p, replace_grist (x, '')))
                else:
                    result.append (x)

    return result
|
||||
|
||||
def is_subfeature_of (parent_property, f):
    """ Return true iff f is an ordinary subfeature of the parent_property's
        feature, or if f is a subfeature of the parent_property's feature
        specific to the parent_property's value.
    """
    # Must be a declared feature that carries the 'subfeature' attribute.
    if not valid (f) or not 'subfeature' in __all_features [f]['attributes']:
        return False

    specific_subfeature = __re_split_subfeatures.match (f)

    if specific_subfeature:
        # The feature has the form
        # <topfeature-topvalue:subfeature>,
        # e.g. <toolset-msvc:version>
        feature_value = split_top_feature(specific_subfeature.group(1))
        # Matches only when parent_property is exactly <topfeature>topvalue.
        if replace_grist (feature_value [1], '<' + feature_value [0] + '>') == parent_property:
            return True
    else:
        # The feature has the form <topfeature-subfeature>,
        # e.g. <toolset-version>
        top_sub = split_top_feature (ungrist (f))

        # An ordinary subfeature matches any value of its top feature.
        if top_sub [1] and add_grist (top_sub [0]) == get_grist (parent_property):
            return True

    return False
|
||||
|
||||
def __is_subproperty_of (parent_property, p):
    """Like is_subfeature_of, applied to the feature of property p."""
    feature = get_grist (p)
    return is_subfeature_of (parent_property, feature)
|
||||
|
||||
|
||||
def is_subvalue(feature, value_string, subfeature, subvalue):
    """Return True iff the subvalue is valid for the feature.  When the
    optional value-string is provided, return True iff the subvalue is
    valid for the given value of the feature.
    """
    if not value_string:
        value_string = ''

    try:
        # One nested lookup replaces the original chain of key checks.
        return __subfeature_value_to_name[feature][value_string][subvalue] == subfeature
    except KeyError:
        return False
|
||||
|
||||
|
||||
|
||||
|
||||
def implied_subfeature (feature, subvalue, value_string):
    """Return the subfeature name implied by subvalue in the context of
    value_string; raises InvalidValue when there is none.
    """
    subfeature = __find_implied_subfeature (feature, subvalue, value_string)
    if not subfeature:
        raise InvalidValue ("'%s' is not a known subfeature value of '%s%s'" % (subvalue, feature, value_string))

    return subfeature
|
||||
|
||||
|
||||
def expand (properties):
    """Return an expanded, normalized version of the given property set:
    implicit features are made explicit, joined subfeature values are
    split into individual properties, and composite properties are
    replaced by their components.

    Non-free features directly expressed in the input suppress any values
    of those features introduced by composite expansion; two directly
    expressed values of one non-free feature raise an error.
    """
    return expand_composites (expand_subfeatures (properties))
|
||||
|
||||
|
||||
def split_top_feature (feature_plus):
    """ Given an ungristed string, finds the longest prefix which is a
        top-level feature name followed by a dash, and return a pair
        consisting of the parts before and after that dash. More
        interesting than a simple split because feature names can contain
        dashes.

        Returns None when no prefix is a known top-level feature.
    """
    e = feature_plus.split ('-')
    f = e [0]

    # v holds the best (longest-prefix) split found so far.
    v = None
    while e:
        if add_grist (f) in __all_top_features:
            if len (e) > 1:
                after = '-'.join (e [1:])
            else:
                after = ''

            v = (f, after)

        # Grow the candidate prefix by one more dash-separated component.
        e = e [1:]
        f = f + '-'
        if len (e): f += e [0]

    return v
|
||||
|
||||
def add_defaults (properties):
    """ Given a set of properties, add default values for features not
        represented in the set.

        Note: if there's an ordinary feature F1 and composite feature
        F2, which includes some value for F1, and both features have default values,
        then the default value of F1 will be added, not the value in F2. This might
        not be right idea: consider

            feature variant : debug ... ;
            <variant>debug : .... <runtime-debugging>on
            feature <runtime-debugging> : off on ;

        Here, when adding default for an empty property set, we'll get

            <variant>debug <runtime_debugging>off

        and that's kind of strange.
    """
    result = [ x for x in properties ]

    # Precondition: every property must carry explicit grist; a bare value
    # here would be an un-expanded implicit feature.
    for v in replace_grist (properties, ''):
        if v in properties:
            raise BaseException ("'add_defaults' requires explicitly specified features, but '%s' appears to be the value of an un-expanded implicit feature" % v)

    # We don't add default for elements with ":" inside. This catches:
    # 1. Conditional properties --- we don't want <variant>debug:<define>DEBUG
    # to be taken as specified value for <variant>
    # 2. Free properties with ":" in values. We don't care, since free properties
    # don't have defaults.
    xproperties = [ property for property in properties if __re_no_hyphen.match (property) ]

    # Add defaults for top-level features with no value in the set.
    missing_top = set.difference (__all_top_features, get_grist (xproperties))
    more = defaults (missing_top)
    result += more
    xproperties += more

    # Add defaults for subfeatures of features which are present
    for p in xproperties:
        gp = get_grist (p)
        s = []
        if __all_features.has_key (gp):
            s = __all_features [gp]['subfeatures']
        f = ungrist (gp)

        # Gristed names of p's subfeatures, e.g. <toolset-version>.
        xbase = ['<%s-%s>' % (f, xs) for xs in s]

        missing_subs = set.difference (xbase, get_grist (result))
        result += defaults (__select_subfeatures (p, missing_subs))

    return result
|
||||
|
||||
def minimize (properties):
    """ Given an expanded property set, eliminate all redundancy: properties
        which are elements of other (composite) properties in the set will
        be eliminated. Non-symmetric properties equal to default values will be
        eliminated, unless they override a value from some composite property.
        Implicit properties will be expressed without feature
        grist, and sub-property values will be expressed as elements joined
        to the corresponding main property.
    """
    # FIXME: the code below was in the original feature.jam file, however 'p' is not defined.
    # # Precondition checking
    # local implicits = [ set.intersection $(p:G=) : $(p:G) ] ;
    # if $(implicits)
    # {
    #    error minimize requires an expanded property set, but \"$(implicits[1])\"
    #      appears to be the value of an un-expanded implicit feature ;
    # }

    # remove properties implied by composite features
    components = []
    for property in properties:
        if __composite_properties.has_key (property):
            components.extend (__composite_properties [property]['components'])

    x = set.difference (properties, components)

    # handle subfeatures and implicit features

    # Move subfeatures to the end so each main property is processed
    # before the subproperties that join onto it.
    x = __move_subfeatures_to_the_end (x)

    result = []
    while x:
        fullp = x [0]
        p = fullp
        f = get_grist (p)
        v = replace_grist (p, '')

        # eliminate features in implicit properties.
        if 'implicit' in __all_features [f]['attributes']:
            p = v

        # locate all subproperties of $(x[1]) in the property set
        subproperties = __select_subproperties (fullp, x)

        if subproperties:
            # reconstitute the joined property name
            subproperties.sort ()
            joined = p + '-' + '-'.join (replace_grist (subproperties, ''))
            result.append (joined)

            # The consumed subproperties are removed from the worklist.
            x = set.difference (x [1:], subproperties)

        else:
            # eliminate properties whose value is equal to feature's
            # default and which are not symmetric and which do not
            # contradict values implied by composite properties.

            # since all component properties of composites in the set
            # have been eliminated, any remaining property whose
            # feature is the same as a component of a composite in the
            # set must have a non-redundant value.
            if [fullp] != defaults ([f]) or 'symmetric' in attributes (f)\
                   or get_grist (fullp) in get_grist (components):
                result.append (p)

            x = x [1:]

    return result
|
||||
|
||||
|
||||
def split (properties):
    """Split a property-set string of the form

        v1/v2/...vN-1/<fN>vN/<fN+1>vN+1/...<fM>vM

    into the list

        v1 v2 ... vN-1 <fN>vN <fN+1>vN+1 ... <fM>vM

    Note that vN...vM may contain slashes: ungristed pieces following a
    gristed one are glued back onto it.  This makes the split resilient
    to the substitution of backslashes for slashes, since Jam, unbidden,
    sometimes swaps slash direction on NT.  Accepts either a single
    string or a sequence of strings.
    """

    def split_one (text):
        parts = []
        for piece in re.split (__re_slash_or_backslash, text):
            if parts and get_grist (parts [-1]) and not get_grist (piece):
                # Re-attach a slash that belonged to the previous gristed value.
                parts [-1] = parts [-1] + '/' + piece
            else:
                parts.append (piece)
        return parts

    if isinstance (properties, str):
        return split_one (properties)

    result = []
    for p in properties:
        result += split_one (p)
    return result
|
||||
|
||||
|
||||
def compress_subproperties (properties):
    """ Combine all subproperties into their parent properties

        Requires: for every subproperty, there is a parent property. All
        features are explicitly expressed.

        This rule probably shouldn't be needed, but
        build-request.expand-no-defaults is being abused for unintended
        purposes and it needs help
    """
    result = []
    matched_subs = []
    # Bug fix: 'all_subs' was used below but never initialized, causing a
    # NameError as soon as a subfeature property was encountered.
    all_subs = []
    for p in properties:
        pg = get_grist (p)
        if not pg:
            # (typo "exppected" fixed)
            raise BaseException ("Gristed variable expected. Got '%s'." % p)

        if not 'subfeature' in __all_features [pg]['attributes']:
            # Main property: collect its subproperties and join their
            # values onto it, e.g. <toolset>gcc + <toolset-version>2.95.2
            # becomes <toolset>gcc-2.95.2.
            subs = __select_subproperties (p, properties)

            matched_subs.extend (subs)

            subvalues = '-'.join (get_value (subs))
            if subvalues: subvalues = '-' + subvalues

            result.append (p + subvalues)

        else:
            all_subs.append (p)

    # TODO: these variables are used just for debugging. What's the overhead?
    # Every subproperty must have been matched to some parent property.
    assert (set.equal (all_subs, matched_subs))

    return result
|
||||
|
||||
######################################################################################
|
||||
# Private methods
|
||||
|
||||
def __select_subproperties (parent_property, properties):
    """Return those elements of properties that are subproperties of
    parent_property.
    """
    return [candidate for candidate in properties
            if __is_subproperty_of (parent_property, candidate)]
|
||||
|
||||
def __move_subfeatures_to_the_end (properties):
    """Helper for minimize: return the same properties, reordered so that
    all subfeature properties come last (relative order otherwise kept).
    """
    main = []
    subs = []
    for prop in properties:
        if 'subfeature' in __all_features [get_grist (prop)]['attributes']:
            subs.append (prop)
        else:
            main.append (prop)

    return main + subs
|
||||
|
||||
def __get_subfeature_name (subfeature, value_string):
    """Return the subfeature name, prefixed with 'value_string:' when a
    value-string was supplied.
    """
    if value_string is None:
        return subfeature
    return value_string + ':' + subfeature
|
||||
|
||||
|
||||
def __validate_feature_attributes (name, attributes):
    """Raise if any attribute is unknown, the feature is already declared,
    or the attribute combination is contradictory.
    """
    unknown = set.difference (attributes, __all_attributes)
    if unknown:
        raise InvalidAttribute ("unknown attributes: '%s' in feature declaration: '%s'" % (str (unknown), name))

    if name in __all_features:
        raise AlreadyDefined ("feature '%s' already defined" % name)

    if 'implicit' in attributes and 'free' in attributes:
        raise InvalidAttribute ("free features cannot also be implicit (in declaration of feature '%s')" % name)

    if 'free' in attributes and 'propagated' in attributes:
        raise InvalidAttribute ("free features cannot also be propagated (in declaration of feature '%s')" % name)
|
||||
|
||||
|
||||
def __validate_feature (feature):
    """Raise an error if the feature is not declared."""
    if feature not in __all_features:
        raise BaseException ('unknown feature "%s"' % feature)
|
||||
|
||||
def __add_to_subfeature_value_to_name_map (feature, value_string, subfeature_name, subvalues):
    """Record, for every subvalue, which subfeature name it implies for the
    given feature (and, when value_string is supplied, for that specific
    feature value).
    """
    if value_string is None:
        value_string = ''

    # setdefault replaces the original pair of has_key checks.
    by_value = __subfeature_value_to_name.setdefault (feature, {}).setdefault (value_string, {})
    for subvalue in subvalues:
        by_value [subvalue] = subfeature_name
|
||||
|
||||
|
||||
def __select_subfeatures (parent_property, features):
    """Return the subset of features consisting of all ordinary
    subfeatures of parent_property's feature, plus all specific
    subfeatures conditional on parent_property's value.
    """
    return [candidate for candidate in features
            if is_subfeature_of (parent_property, candidate)]
|
||||
|
||||
# FIXME: copy over tests.
|
||||
967
src/build/generators.py
Normal file
967
src/build/generators.py
Normal file
@@ -0,0 +1,967 @@
|
||||
# Status: being ported by Vladimir Prus
|
||||
# Base revision: 41557
|
||||
# TODO: replace the logging with dout
|
||||
|
||||
# Copyright Vladimir Prus 2002.
|
||||
# Copyright Rene Rivera 2006.
|
||||
#
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# Manages 'generators' --- objects which can do transformation between different
|
||||
# target types and contain algorithm for finding transformation from sources
|
||||
# to targets.
|
||||
#
|
||||
# The main entry point to this module is generators.construct rule. It is given
|
||||
# a list of source targets, desired target type and a set of properties.
|
||||
# It starts by selecting 'viable generators', which have any chances of producing
|
||||
# the desired target type with the required properties. Generators are ranked and
|
||||
# a set of most specific ones is selected.
|
||||
#
|
||||
# The most specific generators have their 'run' methods called, with the properties
|
||||
# and list of sources. Each one selects target which can be directly consumed, and
|
||||
# tries to convert the remaining ones to the types it can consume. This is done
|
||||
# by recursively calling 'construct' with all consumable types.
|
||||
#
|
||||
# If the generator has collected all the targets it needs, it creates targets
|
||||
# corresponding to result, and returns it. When all generators have been run,
|
||||
# results of one of them are selected and returned as result.
|
||||
#
|
||||
# It's quite possible that 'construct' returns more targets that it was asked for.
|
||||
# For example, it was asked for a target of type EXE, but the only generator found produces
|
||||
# both EXE and TDS (file with debug) information. The extra target will be returned.
|
||||
#
|
||||
# Likewise, when generator tries to convert sources to consumable types, it can get
|
||||
# more targets that it was asked for. The question is what to do with extra targets.
|
||||
# Boost.Build attempts to convert them to requested types, and attempts as early as
|
||||
# possible. Specifically, this is done after invoking each generator. (Later I'll
|
||||
# document the rationale for trying extra target conversion at that point).
|
||||
#
|
||||
# That early conversion is not always desirable. Suppose a generator got a source of
|
||||
# type Y and must consume one target of type X_1 and one target of type X_2.
|
||||
# When converting Y to X_1 extra target of type Y_2 is created. We should not try to
|
||||
# convert it to type X_1, because if we do so, the generator will get two targets
|
||||
# of type X_1, and will be at loss as to which one to use. Because of that, the
|
||||
# 'construct' rule has a parameter, telling if multiple targets can be returned. If
|
||||
# the parameter is false, conversion of extra targets is not performed.
|
||||
|
||||
|
||||
import re
|
||||
import cStringIO
|
||||
import os.path
|
||||
|
||||
from virtual_target import Subvariant
|
||||
import virtual_target, type, property_set, property
|
||||
from b2.util.logger import *
|
||||
from b2.util.utility import *
|
||||
from b2.util import set
|
||||
from b2.util.sequence import unique
|
||||
import b2.util.sequence as sequence
|
||||
from b2.manager import get_manager
|
||||
|
||||
def reset ():
    """Clear all module-level registries and caches.  Mainly for testing."""
    global __generators, __type_to_generators, __generators_for_toolset, __construct_stack
    global __overrides, __active_generators
    global __viable_generators_cache, __viable_source_types_cache

    # Generator registries.
    __generators = {}
    __type_to_generators = {}
    __generators_for_toolset = {}
    __overrides = {}

    # Per-construction state and caches.
    # TODO: can these be global?
    __construct_stack = []
    __viable_generators_cache = {}
    __viable_source_types_cache = {}
    __active_generators = []
|
||||
|
||||
reset ()

# Parses an element of target_types_and_names, "type(prefix%postfix)":
# group 1 is the type, group 3 the name prefix, group 4 the postfix.
_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?')
# Parses "type(name-pattern)": group 1 is the type, group 2 the whole
# parenthesized pattern (if any).
_re_match_type = re.compile('([^\\(]*)(\\(.*\\))?')


# Lazily-initialized debug flag; see debug().
__debug = None
# Current indentation prefix used by dout().
__indent = ""
|
||||
|
||||
def debug():
    # Return True when "--debug-generators" was passed on the command line;
    # computed once and cached in the module-level __debug flag.
    # NOTE(review): 'bjam' does not appear in this file's visible imports --
    # presumably it is an engine-provided builtin module; confirm.
    global __debug
    if __debug is None:
        __debug = "--debug-generators" in bjam.variable("ARGV")
    return __debug
|
||||
|
||||
def increase_indent():
    """Widen the debug-output indentation by one level (four spaces)."""
    global __indent
    __indent = __indent + "    "
|
||||
|
||||
def decrease_indent():
    """Narrow the debug-output indentation by one level (four spaces)."""
    global __indent
    __indent = __indent[:-4]
|
||||
|
||||
def dout(message):
    """Print an indented debug message when generator debugging is enabled.

    Fix: the original used the Python-2-only 'print' statement, a syntax
    error under Python 3.  The parenthesized form below prints identically
    under Python 2 (single parenthesized expression) and Python 3.
    """
    if debug():
        print(__indent + message)
|
||||
|
||||
def normalize_target_list (targets):
    """ Takes a vector of 'virtual-target' instances and makes a normalized
        representation, which is the same for given set of targets,
        regardless of their order.

        targets: a pair whose second element is the list of targets.

        Returns the same pair with the target list in sorted order.
    """
    # Bug fix: the original returned (targets[0], targets[1].sort()).
    # list.sort() sorts in place and returns None, so every "normalized"
    # representation compared equal as (x, None).  sorted() returns the
    # sorted copy that the docstring promises.
    return (targets[0], sorted(targets[1]))
|
||||
|
||||
|
||||
class Generator:
|
||||
""" Creates a generator.
|
||||
manager: the build manager.
|
||||
id: identifies the generator
|
||||
|
||||
rule: the rule which sets up build actions.
|
||||
|
||||
composing: whether generator processes each source target in
|
||||
turn, converting it to required types.
|
||||
Ordinary generators pass all sources together to
|
||||
recursive generators.construct_types call.
|
||||
|
||||
source_types (optional): types that this generator can handle
|
||||
|
||||
target_types_and_names: types the generator will create and, optionally, names for
|
||||
created targets. Each element should have the form
|
||||
type["(" name-pattern ")"]
|
||||
for example, obj(%_x). Name of generated target will be found
|
||||
by replacing % with the name of source, provided explicit name
|
||||
was not specified.
|
||||
|
||||
requirements (optional)
|
||||
|
||||
NOTE: all subclasses must have a similar signature for clone to work!
|
||||
"""
|
||||
def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
    # Guard against a common mistake: these must be sequences, not strings.
    assert(not isinstance(source_types, str))
    assert(not isinstance(target_types_and_names, str))
    self.id_ = id                                          # generator identifier
    self.composing_ = composing                            # see class docstring
    self.source_types_ = source_types                      # consumable target types
    self.target_types_and_names_ = target_types_and_names  # raw 'type[(pattern)]' declarations
    self.requirements_ = requirements                      # properties required to run

    self.target_types_ = []
    self.name_prefix_ = []
    self.name_postfix_ = []

    for e in target_types_and_names:
        # Create three parallel lists: one with the list of target types,
        # and two other with prefixes and postfixes to be added to target
        # name. We use parallel lists for prefix and postfix (as opposed
        # to mapping), because given target type might occur several times,
        # for example "H H(%_symbols)".
        m = _re_separate_types_prefix_and_postfix.match (e)

        if not m:
            raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id))

        # Unmatched optional groups yield None; normalize to ''.
        target_type = m.group (1)
        if not target_type: target_type = ''
        prefix = m.group (3)
        if not prefix: prefix = ''
        postfix = m.group (4)
        if not postfix: postfix = ''

        self.target_types_.append (target_type)
        self.name_prefix_.append (prefix)
        self.name_postfix_.append (postfix)

    # Every declared type must be known to the type registry.
    for x in self.source_types_:
        type.validate (x)

    for x in self.target_types_:
        type.validate (x)
|
||||
|
||||
def clone (self, new_id, new_toolset_properties):
    """ Returns another generator which differs from $(self) in
        - id
        - value to <toolset> feature in properties

        new_id: identifier for the cloned generator.
        new_toolset_properties: <toolset> properties replacing the
            original ones in the requirements.
    """
    return self.__class__ (new_id,
                           self.composing_,
                           self.source_types_,
                           self.target_types_and_names_,
                           # Note: this does not remove any subfeatures of <toolset>
                           # which might cause problems
                           property.change (self.requirements_, '<toolset>') + new_toolset_properties)
|
||||
|
||||
def clone_and_change_target_type(self, base, type):
    """Return a copy of this generator in which every occurrence of the
    target type 'base' is replaced by 'type', preserving any attached
    "(name-pattern)" suffix.
    """
    new_types = []
    for declared in self.target_types_and_names_:
        m = _re_match_type.match(declared)
        assert m

        if m.group(1) != base:
            new_types.append(declared)
        elif m.group(2):
            # Carry the "(name-pattern)" suffix over to the new type.
            new_types.append(type + m.group(2))
        else:
            new_types.append(type)

    return self.__class__(self.id_, self.composing_,
                          self.source_types_,
                          new_types,
                          self.requirements_)
|
||||
|
||||
|
||||
def id (self):
|
||||
return self.id_
|
||||
|
||||
def source_types (self):
|
||||
""" Returns the list of target type the generator accepts.
|
||||
"""
|
||||
return self.source_types_
|
||||
|
||||
def target_types (self):
|
||||
""" Returns the list of target types that this generator produces.
|
||||
It is assumed to be always the same -- i.e. it cannot change depending
|
||||
list of sources.
|
||||
"""
|
||||
return self.target_types_
|
||||
|
||||
def requirements (self):
|
||||
""" Returns the required properties for this generator. Properties
|
||||
in returned set must be present in build properties if this
|
||||
generator is to be used. If result has grist-only element,
|
||||
that build properties must include some value of that feature.
|
||||
"""
|
||||
return self.requirements_
|
||||
|
||||
def match_rank (self, property_set_to_match):
    """ Returns true if the generator can be run with the specified
        properties.
    """
    # See if generator's requirements are satisfied by
    # 'properties'. Treat a feature name in requirements
    # (i.e. grist-only element), as matching any value of the
    # feature.
    all_requirements = self.requirements ()

    # Split requirements into full properties (feature + value) and
    # bare feature names.
    property_requirements = []
    feature_requirements = []
    for r in all_requirements:
        if get_value (r):
            property_requirements.append (r)

        else:
            feature_requirements.append (r)

    properties_to_match = property_set_to_match.raw ()

    # Full properties must appear verbatim; bare features need only have
    # some value present.
    return set.contains (property_requirements, properties_to_match) \
        and set.contains (feature_requirements, get_grist (properties_to_match))
|
||||
|
||||
def run(self, project, name, prop_set, sources):
    """Try to invoke this generator on the given sources.

    Returns a list of generated targets (instances of 'virtual-target').

    project:  project for which the targets are generated.
    name:     value of the 'name' attribute for all generated targets;
              see the 'generated_targets' method.
    prop_set: desired properties for generated targets.
    sources:  source targets.
    """
    logger = project.manager().logger()
    if logger.on():
        logger.log(__name__, " generator '%s'" % self.id_)
        logger.log(__name__, " composing: '%s'" % self.composing_)

    if not self.composing_ and len(sources) > 1 and len(self.source_types_) > 1:
        raise BaseException("Unsupported source/source_type combination")

    # A composing generator combines several targets which may carry
    # different names, so it cannot decide on a produced-target name by
    # itself: the name must be passed in.  In effect, composing
    # generators are runnable only at the top of the transformation
    # graph (or with an explicit name); a chain like
    # CPP -> OBJ -> STATIC_LIB -> RSP -> EXE is disallowed because the
    # OBJ -> STATIC_LIB step is composing.
    if name or not self.composing_:
        return self.run_really(project, name, prop_set, sources)
    return []
|
||||
|
||||
def run_really(self, project, name, prop_set, sources):
    """Convert the sources and build the resulting targets.

    'consumed' holds targets this generator will consume directly;
    'bypassed' holds targets that could not be consumed and are
    returned as-is.
    """
    if self.composing_:
        consumed, bypassed = self.convert_multiple_sources_to_consumable_types(
            project, prop_set, sources)
    else:
        consumed, bypassed = self.convert_to_consumable_types(
            project, name, prop_set, sources)

    result = []
    if consumed:
        result = self.construct_result(consumed, project, name, prop_set)
        result.extend(bypassed)

    logger = project.manager().logger()
    if result:
        if logger.on():
            logger.log(__name__, " SUCCESS: ", result)
    else:
        logger.log(__name__, " FAILURE")

    return result
|
||||
|
||||
def construct_result(self, consumed, project, name, prop_set):
    """Construct the dependency graph returned by this generator.

    consumed: already prepared list of consumable targets.  If the
        generator requires several source files, it contains exactly
        len(self.source_types_) targets with matching types; otherwise
        it may contain several targets of type self.source_types_[0].
    project, name: forwarded to 'generated_targets'.
    prop_set: properties to be used for all actions created here.
    """
    result = []
    if len(self.source_types_) < 2 and not self.composing_:
        # A 1->1 transformation: apply it to each consumed target in order.
        for single_source in consumed:
            result.extend(self.generated_targets([single_source], prop_set, project, name))
    elif consumed:
        result.extend(self.generated_targets(consumed, prop_set, project, name))
    return result
|
||||
|
||||
def determine_output_name(self, sources):
    """Determine the name of the produced target from the names of the
    sources.

    All sources must share the same name once directory and extension
    are ignored; otherwise an error is reported via the manager.
    Returns the common basename.
    """
    # The simple case is when a source name has a single dot: take the
    # part before it.  Several dots can be caused by:
    #   - using a source file like a.host.cpp, or
    #   - a type whose suffix contains a dot, e.g. a type 'host_cpp'
    #     with extension 'host.cpp'.
    # In the first case we want the part up to the last dot; in the
    # second case it is less clear, but for now we also take the part
    # up to the last dot.
    name = os.path.splitext(sources[0].name())[0]

    for s in sources[1:]:
        # Bug fix: os.path.splitext returns a (root, ext) tuple, so the
        # original comparison `splitext(...) != name` was always true
        # and every multi-source call reported an error.  Compare the
        # root component instead.
        n2 = os.path.splitext(s.name())[0]
        if n2 != name:
            get_manager().errors()(
                "%s: source targets have different names: cannot determine target name"
                % (self.id_))

    # Names of sources might include a directory; strip it.
    return os.path.basename(name)
|
||||
|
||||
|
||||
def generated_targets(self, sources, prop_set, project, name):
    """Construct the targets created after consuming 'sources'.

    The result is a list of virtual targets with the same length as the
    'target_types' attribute and with the corresponding types.

    When 'name' is empty, all source targets must share the same 'name'
    attribute, which is used instead of the 'name' argument.

    Each generated target's 'name' attribute is the 'name' parameter
    decorated with this generator's per-type name prefix/postfix.  For
    example, if the target types are T1 and T2 (with name pattern
    "%_x"), suffixes .t1 and .t2, and the source is foo.z, the created
    files are "foo.t1" and "foo_x.t2".  The 'name' attribute determines
    the basename of a file.

    Note that this pattern mechanism has nothing to do with implicit
    patterns in make; it is a way to produce a target whose name
    differs from the source's name.
    """
    if not name:
        name = self.determine_output_name(sources)

    # A single action drives all generated targets.
    action_factory = self.action_class()
    action = action_factory(project.manager(), sources, self.id_, prop_set)

    # Create one generated target per target type, consuming one
    # prefix/postfix pair per type (the lists are assumed to be at
    # least as long as target_types_, as in the original loop).
    targets = [
        virtual_target.FileTarget(prefix + name + postfix, False, target_type, project, action)
        for target_type, prefix, postfix in zip(self.target_types_,
                                                self.name_prefix_,
                                                self.name_postfix_)
    ]

    return [project.manager().virtual_targets().register(t) for t in targets]
|
||||
|
||||
def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
    """ Attempts to convert 'sources' to the types that this generator can
        handle, producing the set of targets that should be used when the
        generator is run.

        only_one: convert 'sources' to only one of the source types; if
                  there is more than one possibility, report an error.

        Returns a pair:
            consumed: all targets that can be consumed.
            bypassed: all targets that cannot be consumed.
    """
    consumed = []
    bypassed = []
    missing_types = []

    if len (sources) > 1:
        # Don't know how to handle several sources yet. Just try
        # to pass the request to other generator
        missing_types = self.source_types_
    else:
        # Single source: see which source types it satisfies directly.
        (c, m) = self.consume_directly (sources [0])
        consumed += c
        missing_types += m

    # No need to search for transformation if
    # some source type has consumed source and
    # no more source types are needed.
    if only_one and consumed:
        missing_types = []

    #TODO: we should check that only one source type
    #if create of 'only_one' is true.
    # TODO: consider if consumed/bypassed separation should
    # be done by 'construct_types'.

    if missing_types:
        # Ask the generator-selection machinery to produce the types we
        # still need from the given sources.
        transformed = construct_types (project, name, missing_types, prop_set, sources)

        # Add targets of right type to 'consumed'. Add others to
        # 'bypassed'. The 'generators.construct' rule has done
        # its best to convert everything to the required type.
        # There's no need to rerun it on targets of different types.

        # NOTE: ignoring usage requirements (transformed[0]).
        for t in transformed[1]:
            if t.type() in missing_types:
                consumed.append(t)
            else:
                bypassed.append(t)

    consumed = unique(consumed)
    bypassed = unique(bypassed)

    # remove elements of 'bypassed' that are in 'consumed'

    # Suppose the target type of current generator, X is produced from
    # X_1 and X_2, which are produced from Y by one generator.
    # When creating X_1 from Y, X_2 will be added to 'bypassed'
    # Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed'
    # But they are also in 'consumed'. We have to remove them from
    # bypassed, so that generators up the call stack don't try to convert
    # them.

    # In this particular case, X_1 instance in 'consumed' and X_1 instance
    # in 'bypassed' will be the same: because they have the same source and
    # action name, and 'virtual-target.register' won't allow two different
    # instances. Therefore, it's OK to use 'set.difference'.

    bypassed = set.difference(bypassed, consumed)

    return (consumed, bypassed)
|
||||
|
||||
|
||||
def convert_multiple_sources_to_consumable_types(self, project, prop_set, sources):
    """Convert several files to consumable types.

    Each source is converted independently; a source that fails to
    convert is logged but does not abort the others.
    """
    consumed = []
    bypassed = []

    # Process each source one-by-one, trying to convert it to a usable type.
    for source in sources:
        # TODO: need to check for failure on each source.
        converted, passed_through = self.convert_to_consumable_types(
            project, None, prop_set, [source], True)
        if not converted:
            project.manager().logger().log(__name__, " failed to convert ", source)

        consumed.extend(converted)
        bypassed.extend(passed_through)

    return (consumed, bypassed)
|
||||
|
||||
def consume_directly(self, source):
    """Classify 'source' against this generator's declared source types.

    Returns a pair (consumed, missing_types): 'consumed' contains
    'source' if its type equals or derives from some declared source
    type; 'missing_types' lists the declared source types that are not
    satisfied and must be produced by other generators.
    """
    real_source_type = source.type()

    # If there are no declared source types, we can consume anything.
    # Bug fix: the original assigned `self.source_types` (the bound
    # method, missing the trailing underscore) and then iterated
    # `self.source_types_` directly, so the "consume anything" fallback
    # was dead code and such generators consumed nothing.
    source_types = self.source_types_
    if not source_types:
        source_types = [real_source_type]

    consumed = []
    missing_types = []
    for st in source_types:
        # The 'source' is of the right type already (or derived from it).
        # 'type' here is the b2 type module imported at file top, not the
        # builtin -- TODO confirm against the file header.
        if real_source_type == st or type.is_derived(real_source_type, st):
            consumed.append(source)
        else:
            missing_types.append(st)

    return (consumed, missing_types)
|
||||
|
||||
def action_class(self):
    """Return the class used to create actions.

    The default implementation returns virtual_target.Action; subclasses
    may override to supply a specialized action class.
    """
    return virtual_target.Action
|
||||
|
||||
|
||||
def find(id):
    """Find the registered generator with the given id; None if absent."""
    return __generators.get(id, None)
|
||||
|
||||
def register(g):
    """Register the new generator instance 'g'.

    Records the generator in the id -> generator map, in the
    per-target-type map, and in the per-toolset list (keyed by the
    first dot-separated component of the id).
    """
    id = g.id()
    __generators[id] = g

    # A generator can produce several targets of the same type.  Record
    # it only once per type; otherwise it would be tried twice and we
    # would get a false ambiguity.
    for produced_type in sequence.unique(g.target_types()):
        __type_to_generators.setdefault(produced_type, []).append(g)

    # Update the set of generators for the toolset.

    # TODO: should we check that a generator with this id is not already
    # registered?  The fop.jam module intentionally declares two
    # generators with the same id, so such a check would break it.

    # Some generator ids have multiple periods (e.g. gcc.compile.c++),
    # so simply stripping the last suffix would yield the wrong toolset
    # name: it would append to .generators-for-toolset.gcc.compile,
    # which is separate from .generators-for-toolset.gcc.  Taking the
    # part before the first period makes generator inheritance work
    # properly.  See also inherit-generators in the toolset module.
    toolset_name = id.split('.', 100)[0]
    __generators_for_toolset.setdefault(toolset_name, []).append(g)
|
||||
|
||||
def register_standard(id, source_types, target_types, requirements=[]):
    """Create a new non-composing Generator instance and register it.

    Returns the created instance.  Rationale: returning the instance
    makes it possible to first register a generator and then call its
    'run' method directly, bypassing all generator selection.
    """
    new_generator = Generator(id, False, source_types, target_types, requirements)
    register(new_generator)
    return new_generator
|
||||
|
||||
def register_composing(id, source_types, target_types, requirements=[]):
    """Create a new composing Generator instance, register it and return it."""
    new_generator = Generator(id, True, source_types, target_types, requirements)
    register(new_generator)
    return new_generator
|
||||
|
||||
def generators_for_toolset(toolset):
    """Return all generators that belong to 'toolset' (empty list if none)."""
    return __generators_for_toolset.get(toolset, [])
|
||||
|
||||
def override(overrider_id, overridee_id):
    """Make generator 'overrider_id' be preferred to 'overridee_id'.

    If, when searching for generators that could produce a target of a
    certain type, both generators are among the viable generators, the
    overridden generator is immediately discarded.

    The overridden generators are discarded immediately after computing
    the list of viable generators, before running any of them.
    """
    # Bug fix: dict.get returns a fresh, unattached list when the key is
    # absent, so appending to it silently discarded the override.  Use
    # setdefault to store the list in the map before appending.
    __overrides.setdefault(overrider_id, []).append(overridee_id)
|
||||
|
||||
def __viable_source_types_real (target_type):
    """ Returns a list of source types which can possibly be converted
        to 'target_type' by some chain of generator invocations.

        More formally, takes all generators for 'target_type' and
        returns the union of source types for those generators and the
        result of calling itself recursively on those source types.
    """
    generators = []

    # Worklist of types not yet processed; seeded with 'target_type'
    # and all of its base types.
    t = type.all_bases (target_type)

    result = []
    # 't' is the list of types which are not yet processed
    while t:
        # Find all generators for current type.
        # Unlike 'find_viable_generators' we don't care about prop_set.
        generators = __type_to_generators.get (t [0], [])
        t = t[1:]

        for g in generators:
            if not g.source_types():
                # Empty source types -- everything can be accepted
                result = "*"
                # This will terminate outer loop.
                t = None
                break

            for source_type in g.source_types ():
                if not source_type in result:
                    # If generator accepts 'source_type' it
                    # will happily accept any type derived from it
                    all = type.all_derived (source_type)
                    for n in all:
                        if not n in result:
                            # Queue the derived type for processing and
                            # record it as viable.
                            t.append (n)
                            result.append (n)

    # NOTE(review): when result is the string "*", this calls
    # unique("*") -- presumably 'unique' handles that (or returns it
    # unchanged); confirm against the util module.
    result = unique (result)

    return result
|
||||
|
||||
|
||||
def viable_source_types(target_type):
    """Helper rule: caches the result of '__viable_source_types_real'."""
    # dict.has_key is deprecated; the 'in' membership test is
    # behaviorally identical and also forward-compatible.
    if target_type not in __viable_source_types_cache:
        __viable_source_types_cache[target_type] = __viable_source_types_real(target_type)
    return __viable_source_types_cache[target_type]
|
||||
|
||||
def viable_source_types_for_generator_real(generator):
    """Return the list of source types which, when passed to the 'run'
    method of 'generator', have some chance of being eventually used
    (possibly after conversion by other generators)."""
    declared = generator.source_types()

    if not declared:
        # A generator without declared source types may be a special
        # generator (like builtin.lib-generator) that just relays to
        # other generators.  Return '*' to indicate that any source
        # type is possibly OK, since we cannot know for sure.
        return ['*']

    accepted = []
    for source_type in declared:
        accepted.extend(type.all_derived(source_type))
        accepted.extend(viable_source_types(source_type))
    accepted = unique(accepted)

    # If any contributing generator accepts everything, collapse to '*'.
    if "*" in accepted:
        return ["*"]
    return accepted
|
||||
|
||||
def viable_source_types_for_generator(generator):
    """Cache the result of 'viable_source_types_for_generator_real'.

    (The original docstring claimed this caches itself.)
    """
    # NOTE(review): the cache is shared with 'viable_source_types' and
    # keyed by str(generator) here vs. the raw type name there --
    # presumably these key spaces never collide; verify.
    key = str(generator)
    # dict.has_key is deprecated; 'in' is equivalent.
    if key not in __viable_source_types_cache:
        __viable_source_types_cache[key] = viable_source_types_for_generator_real(generator)

    return __viable_source_types_cache[key]
|
||||
|
||||
def try_one_generator_really(project, name, generator, target_type, properties, sources):
    """Run 'generator' on 'sources'.

    Returns (usage requirements, list of created targets), or None when
    the generator produced nothing.
    """
    targets = generator.run(project, name, properties, sources)

    dout("returned " + str(targets))

    if not targets:
        return None

    # A generator may return its usage requirements as the first
    # element; otherwise assume empty usage requirements.
    if isinstance(targets[0], property_set.PropertySet):
        usage_requirements = targets[0]
        targets = targets[1]
    else:
        usage_requirements = property_set.empty()

    dout(" generator" + generator.id() + " spawned ")
    # generators.dout [ indent ] " " $(targets) ;
    # if $(usage-requirements)
    # {
    #    generators.dout [ indent ] "  with usage requirements:" $(x) ;
    # }

    return (usage_requirements, targets)
|
||||
|
||||
def try_one_generator(project, name, generator, target_type, properties, sources):
    """Check if the generator invocation can be pruned because it is
    guaranteed to fail.  If so, quickly return an empty list; otherwise
    call 'try_one_generator_really'.
    """
    source_types = []
    for s in sources:
        source_types.append(s.type())

    viable_source_types = viable_source_types_for_generator(generator)

    # Prune when the generator cannot possibly consume any of the
    # actual source types.
    if source_types and viable_source_types != ['*'] and \
        not set.intersection(source_types, viable_source_types):
        if project.manager().logger().on():
            id = generator.id()
            project.manager().logger().log(__name__, "generator '%s' pruned" % id)
            # Bug fix: the original wrote "source_types" '%s' -- two
            # adjacent string literals that concatenated into
            # "source_types%s", dropping the separator and quotes.
            project.manager().logger().log(__name__, "source_types '%s'" % source_types)
            project.manager().logger().log(__name__, "viable_source_types '%s'" % viable_source_types)

        return []

    else:
        return try_one_generator_really(project, name, generator, target_type, properties, sources)
|
||||
|
||||
|
||||
def construct_types(project, name, target_types, prop_set, sources):
    """Run 'construct' for each requested target type and merge the
    results.

    Returns (usage requirements, targets).  If nothing could be
    constructed, the original sources are returned as the targets.
    """
    result = []
    usage_requirements = property_set.empty()

    for requested_type in target_types:
        r = construct(project, name, requested_type, prop_set, sources)
        if r:
            (ur, targets) = r
            usage_requirements = usage_requirements.add(ur)
            result.extend(targets)

    # TODO: have to introduce a parameter controlling whether several
    # types can be matched, and add appropriate checks.

    # TODO: need to review the documentation for 'construct' to see if
    # it should return $(source) even if nothing can be done with it.
    # Current docs seem to imply that, contrary to the behaviour.
    if result:
        return (usage_requirements, result)
    return (usage_requirements, sources)
|
||||
|
||||
def __ensure_type(targets):
    """Ensure all 'targets' have types; raise an error otherwise."""
    for target in targets:
        if not target.type():
            raise BaseException("target '%s' has no type" % str(target))
|
||||
|
||||
def find_viable_generators_aux (target_type, prop_set):
    """ Returns generators which can be used to construct a target of the
        specified type with the specified properties.  Uses the following
        algorithm:
        - iterates over the requested target_type and all its bases (in the
          order returned by type.all_bases)
        - for each type, finds all generators that generate that type and
          whose requirements are satisfied by the properties
        - if the set of generators is not empty, returns that set.

        Note: this algorithm explicitly ignores generators for base classes
        if there's at least one generator for the requested target_type.
    """
    # Select generators that can create the required target type.
    viable_generators = []
    initial_generators = []

    # Local import -- presumably to avoid a circular module dependency;
    # TODO confirm.
    import type

    # Try all-type generators first. Assume they have
    # quite specific requirements.
    all_bases = type.all_bases(target_type)

    for t in all_bases:

        initial_generators = __type_to_generators.get(t, [])

        if initial_generators:
            dout("there are generators for this type")
            if t != target_type:
                # We're here, when no generators for target-type are found,
                # but there are some generators for a base type.
                # We'll try to use them, but they will produce targets of
                # base type, not of 'target-type'. So, we clone the generators
                # and modify the list of target types.
                generators2 = []
                for g in initial_generators[:]:
                    # generators.register adds generator to the list of generators
                    # for toolsets, which is a bit strange, but should work.
                    # That list is only used when inheriting toolset, which
                    # should have being done before generators are run.
                    ng = g.clone_and_change_target_type(t, target_type)
                    generators2.append(ng)
                    register(ng)

                initial_generators = generators2
            # Stop at the most-derived type that has any generators.
            break

    # Keep only the generators whose requirements are satisfied.
    for g in initial_generators:
        dout("trying generator " + g.id()
             + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")

        m = g.match_rank(prop_set)
        if m:
            dout(" is viable")
            viable_generators.append(g)

    return viable_generators
|
||||
|
||||
def find_viable_generators(target_type, prop_set):
    """Caching wrapper around 'find_viable_generators_aux' that also
    drops generators already running and applies override relations."""
    cache_key = target_type + '.' + str(prop_set)

    cached = __viable_generators_cache.get(cache_key, None)
    if not cached:
        cached = find_viable_generators_aux(target_type, prop_set)
        __viable_generators_cache[cache_key] = cached

    viable_generators = []
    for g in cached:
        # Avoid trying the same generator twice on different levels.
        # TODO: is this really used?
        if not g in __active_generators:
            viable_generators.append(g)

    # Generators which override 'all'.
    all_overrides = []
    # Ids of generators which are overridden.
    overriden_ids = []
    for g in viable_generators:
        this_overrides = __overrides.get(g.id(), [])
        if this_overrides:
            overriden_ids.extend(this_overrides)
            if 'all' in this_overrides:
                all_overrides.append(g)

    # A generator that overrides 'all' supersedes every other candidate.
    if all_overrides:
        viable_generators = all_overrides

    # Drop generators that some other viable generator overrides.
    return [g for g in viable_generators if not g.id() in overriden_ids]
|
||||
|
||||
def __construct_really (project, name, target_type, prop_set, sources):
    """ Attempts to construct a target by finding viable generators, running
        them and selecting the dependency graph.  Reports an error if more
        than one generator succeeds (ambiguity).
    """
    viable_generators = find_viable_generators (target_type, prop_set)

    result = []

    project.manager ().logger ().log (__name__, "*** %d viable generators" % len (viable_generators))

    generators_that_succeeded = []

    for g in viable_generators:
        # Mark this generator active so recursive construct calls don't
        # re-enter it (see find_viable_generators).
        __active_generators.append(g)
        r = try_one_generator (project, name, g, target_type, prop_set, sources)
        del __active_generators[-1]

        if r:
            generators_that_succeeded.append(g)
            if result:
                # A second generator succeeded: report the ambiguity with
                # both produced dependency graphs.
                output = cStringIO.StringIO()
                print >>output, "ambiguity found when searching for best transformation"
                print >>output, "Trying to produce type '%s' from: " % (target_type)
                for s in sources:
                    print >>output, " - " + s.str()
                print >>output, "Generators that succeeded:"
                for g in generators_that_succeeded:
                    print >>output, " - " + g.id()
                print >>output, "First generator produced: "
                for t in result[1:]:
                    print >>output, " - " + str(t)
                print >>output, "Second generator produced:"
                for t in r[1:]:
                    print >>output, " - " + str(t)
                get_manager().errors()(output.getvalue())
            else:
                # First success: keep the (usage requirements, targets) pair.
                result = r;

    return result;
|
||||
|
||||
|
||||
def construct (project, name, target_type, prop_set, sources):
    """ Attempts to create a target of 'target_type' with 'prop_set'
        properties from 'sources'. The 'sources' are treated as a collection
        of *possible* ingredients -- i.e. it is not required to consume
        them all.

        Returns a list of targets. When this invocation is the first
        instance of 'construct' on the stack, returns only targets of the
        requested 'target_type'; otherwise, also returns unused sources
        and additionally generated targets.
    """
    # TODO: Why is global needed here?
    global __construct_stack
    # Only check source types on nested (recursive) invocations.
    if __construct_stack:
        __ensure_type (sources)

    __construct_stack.append (1)

    if project.manager().logger().on():
        # NOTE(review): increase_indent() is called bare here but the
        # matching call below is logger().decrease_indent() -- and that
        # one runs even when logging is off.  Looks asymmetric; confirm
        # against the logger/debug module.
        increase_indent ()

        dout( "*** construct " + target_type)

        for s in sources:
            dout("    from " + str(s))

        project.manager().logger().log (__name__, " properties: ", prop_set.raw ())

    result = __construct_really(project, name, target_type, prop_set, sources)

    project.manager().logger().decrease_indent()

    # Pop our frame from the construct stack.
    __construct_stack = __construct_stack [1:]

    return result
|
||||
|
||||
996
src/build/project.ann.py
Normal file
996
src/build/project.ann.py
Normal file
@@ -0,0 +1,996 @@
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 1) # Status: being ported by Vladimir Prus
|
||||
ddc17f01 (vladimir_prus 2007-10-26 14:57:56 +0000 2) # Base revision: 40480
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 3)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 4) # Copyright 2002, 2003 Dave Abrahams
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 5) # Copyright 2002, 2005, 2006 Rene Rivera
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 6) # Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 7) # Distributed under the Boost Software License, Version 1.0.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 8) # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 9)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 10) # Implements project representation and loading.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 11) # Each project is represented by
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 12) # - a module where all the Jamfile content live.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 13) # - an instance of 'project-attributes' class.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 14) # (given module name, can be obtained by 'attributes' rule)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 15) # - an instance of 'project-target' class (from targets.jam)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 16) # (given a module name, can be obtained by 'target' rule)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 17) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 18) # Typically, projects are created as result of loading Jamfile, which is
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 19) # do by rules 'load' and 'initialize', below. First, module for Jamfile
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 20) # is loaded and new project-attributes instance is created. Some rules
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 21) # necessary for project are added to the module (see 'project-rules' module)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 22) # at the bottom of this file.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 23) # Default project attributes are set (inheriting attributes of parent project, if
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 24) # it exists). After that, Jamfile is read. It can declare its own attributes,
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 25) # via 'project' rule, which will be combined with already set attributes.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 26) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 27) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 28) # The 'project' rule can also declare project id, which will be associated with
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 29) # the project module.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 30) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 31) # There can also be 'standalone' projects. They are created by calling 'initialize'
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 32) # on arbitrary module, and not specifying location. After the call, the module can
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 33) # call 'project' rule, declare main target and behave as regular projects. However,
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 34) # since it's not associated with any location, it's better declare only prebuilt
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 35) # targets.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 36) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 37) # The list of all loaded Jamfile is stored in variable .project-locations. It's possible
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 38) # to obtain module name for a location using 'module-name' rule. The standalone projects
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 39) # are not recorded, the only way to use them is by project id.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 40)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 41) import b2.util.path
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 42) from b2.build import property_set, property
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 43) from b2.build.errors import ExceptionWithUserContext
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 44) import b2.build.targets
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 45)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 46) import bjam
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 47)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 48) import re
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 49) import sys
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 50) import os
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 51) import string
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 52) import imp
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 53) import traceback
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 54)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 55) class ProjectRegistry:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 56)
|
||||
def __init__(self, manager, global_build_dir):
    """Creates the registry tracking all loaded projects.

    manager -- the build Manager instance.
    global_build_dir -- value of the --build-dir option (may be None).
    """
    self.manager = manager
    # Fix: the constructor previously discarded its argument and always
    # stored None, so the --build-dir value never reached the registry
    # (making the Jamroot 'id' warning in load_jamfile unreachable).
    self.global_build_dir = global_build_dir
    self.project_rules_ = ProjectRules(self)

    # The target corresponding to the project being loaded now
    self.current_project = None

    # The set of names of loaded project modules
    self.jamfile_modules = {}

    # Mapping from location to module name
    self.location2module = {}

    # Mapping from project id to project module
    self.id2module = {}

    # Map from Jamfile directory to parent Jamfile/Jamroot
    # location.
    self.dir2parent_jamfile = {}

    # Map from directory to the name of Jamfile in
    # that directory (or None).
    self.dir2jamfile = {}

    # Map from project module to attributes object.
    self.module2attributes = {}

    # Map from project module to target for the project
    self.module2target = {}

    # Map from names to Python modules, for modules loaded
    # via 'using' and 'import' rules in Jamfiles.
    self.loaded_tool_modules_ = {}

    # Map from project target to the list of
    # (id,location) pairs corresponding to all 'use-project'
    # invocations.
    # TODO: should not have a global map, keep this
    # in ProjectTarget.
    self.used_projects = {}

    self.saved_current_project = []

    self.JAMROOT = self.manager.getenv("JAMROOT")

    # Note the use of character groups, as opposed to listing
    # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate
    # matches on windows and would have to eliminate duplicates.
    if not self.JAMROOT:
        self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"]

    # Default patterns to search for the Jamfiles to use for build
    # declarations.
    self.JAMFILE = self.manager.getenv("JAMFILE")

    if not self.JAMFILE:
        self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile",
                        "[Jj]amfile.jam"]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 117)
|
||||
def load (self, jamfile_location):
    """Loads jamfile at the given location. After loading, project global
    file and jamfile needed by the loaded one will be loaded recursively.
    If the jamfile at that location is loaded already, does nothing.
    Returns the project module for the Jamfile."""

    # Normalize the location against the current directory so equivalent
    # spellings of the same path map to the same project module.
    absolute = os.path.normpath(os.path.join(os.getcwd(), jamfile_location))
    jamfile_location = b2.util.path.relpath(os.getcwd(), absolute)

    if "--debug-loading" in self.manager.argv():
        print("Loading Jamfile at '%s'" % jamfile_location)

    mname = self.module_name(jamfile_location)

    # Already loaded -- nothing further to do.
    if mname in self.jamfile_modules:
        return mname

    self.load_jamfile(jamfile_location)

    # Child projects must be loaded strictly after their parents, because
    # children inherit attributes that the parent defines while loading.
    # "build-project"/"use-project" references to children are therefore
    # only recorded during the load above and resolved here, afterwards.
    self.load_used_projects(mname)

    return mname
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 151)
|
||||
def load_used_projects(self, module_name):
    """Resolves every 'use-project' recorded for 'module_name', loading
    each referenced project relative to this project's location."""
    # Pairs of (project-id, relative-location) recorded while the
    # Jamfile was loading.
    recorded = self.used_projects[module_name]

    location = self.attribute(module_name, "location")
    for project_id, relative in recorded:
        self.use(project_id, os.path.join(location, relative))
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 162)
|
||||
def load_parent(self, location):
    """Loads parent of Jamfile at 'location'.
    Issues an error if nothing is found."""

    candidates = b2.util.path.glob_in_parents(
        location, self.JAMROOT + self.JAMFILE)

    if candidates:
        # The first match is the nearest parent; load the project
        # rooted in its directory.
        return self.load(os.path.dirname(candidates[0]))

    print("error: Could not find parent for project at '%s'" % location)
    print("error: Did not find Jamfile or project-root.jam in any parent directory.")
    sys.exit(1)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 176)
|
||||
def act_as_jamfile(self, module, location):
    """Makes the specified 'module' act as if it were a regularly loaded Jamfile
    at 'location'. If a Jamfile is already loaded for that location, it's an
    error."""

    module_at_location = self.module_name(location)
    if module_at_location in self.jamfile_modules:
        self.manager.errors()(
            "Jamfile was already loaded for '%s'" % location)

    # Set up non-default mapping from location to module.
    self.location2module[location] = module

    # Record the location as loaded so that we don't try to load a
    # Jamfile for it in the future.
    # Fix: 'jamfile_modules' is a dict keyed by module name, but the
    # original called list-style .append(location) on it, which raises
    # AttributeError.  Record under the module name computed for the
    # location, matching how load_jamfile() and find() consult this map.
    self.jamfile_modules[module_at_location] = True

    self.initialize(module, location)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 194)
|
||||
def find(self, name, current_location):
    """Given 'name' -- either a project id or a plain directory name --
    return the project module for that id or directory, loading the
    project on demand.  Returns None when nothing is found."""

    # A leading slash means 'name' is a project id.
    if name[0] == '/':
        registered = self.id2module.get(name)
        if registered:
            return registered

    # Fall back to treating 'name' as a directory relative to the
    # current location.
    location = os.path.join(current_location, name)

    # If no project is registered for the given location, try to
    # load it. First see if we have Jamfile. If not we might have project
    # root, willing to act as Jamfile. In that case, project-root
    # must be placed in the directory referred by id.
    project_module = self.module_name(location)
    if project_module not in self.jamfile_modules and \
           b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
        project_module = self.load(location)

    return project_module
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 219)
|
||||
def module_name(self, jamfile_location):
    """Returns the name of module corresponding to 'jamfile-location'.
    If no module corresponds to location yet, associates default
    module name with that location."""
    cached = self.location2module.get(jamfile_location)
    if cached:
        return cached

    # Root the path, so that locations are always unambiguous.
    # Without this, we can't decide if '../../exe/program1' and '.'
    # are the same paths, or not.
    rooted = os.path.realpath(
        os.path.join(os.getcwd(), jamfile_location))
    module = "Jamfile<%s>" % rooted
    # The cache key is the rooted path (same as the original behavior).
    self.location2module[rooted] = module
    return module
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 234)
|
||||
def find_jamfile (self, dir, parent_root=0, no_errors=0):
    """Find the Jamfile at the given location. This returns the
    exact names of all the Jamfiles in the given directory. The optional
    parent-root argument causes this to search not the given directory
    but the ones above it up to the directory given in it."""

    # Glob for all the possible Jamfiles according to the match pattern.
    #
    jamfile_glob = None
    if parent_root:
        # Parent searches are memoized per starting directory.
        parent = self.dir2parent_jamfile.get(dir)
        if not parent:
            parent = b2.util.path.glob_in_parents(dir,
                                                  self.JAMFILE)
            self.dir2parent_jamfile[dir] = parent
        jamfile_glob = parent
    else:
        jamfile = self.dir2jamfile.get(dir)
        if not jamfile:
            jamfile = b2.util.path.glob([dir], self.JAMFILE)
            self.dir2jamfile[dir] = jamfile
        jamfile_glob = jamfile

    # Fix: the original condition was 'if len(jamfile_glob):', which
    # triggered the "multiple Jamfiles" disambiguation (and possibly a
    # spurious warning) even when exactly one Jamfile was found.  The
    # branch only makes sense for more than one match.
    if len(jamfile_glob) > 1:
        # Multiple Jamfiles found in the same place. Warn about this.
        # And ensure we use only one of them.
        # As a temporary convenience measure, if there's Jamfile.v2 among
        # found files, suppress the warning and use it.
        #
        pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)"
        v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)]
        if len(v2_jamfiles) == 1:
            jamfile_glob = v2_jamfiles
        else:
            print("""warning: Found multiple Jamfiles at '%s'!
Loading the first one: '%s'.""" % (dir, jamfile_glob[0]))

    # Could not find it, error.
    if not no_errors and not jamfile_glob:
        self.manager.errors()(
            """Unable to load Jamfile.
Could not find a Jamfile in directory '%s'
Attempted to find it with pattern '%s'.
Please consult the documentation at 'http://boost.org/b2.'."""
            % (dir, " ".join(self.JAMFILE)))
        # Idiom fix: " ".join replaces the deprecated string.join, which
        # also joins with a single space by default.

    return jamfile_glob[0]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 282)
|
||||
def load_jamfile(self, dir):
    """Load a Jamfile at the given directory. Returns nothing.
    Will attempt to load the file as indicated by the JAMFILE patterns.
    Effect of calling this rule twice with the same 'dir' is undefined.

    dir -- directory in which to look for a Jamroot/Jamfile.
    """

    # See if the Jamfile is where it should be.  Jamroot patterns take
    # precedence; otherwise fall back to the regular Jamfile search.
    jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
    if not jamfile_to_load:
        jamfile_to_load = self.find_jamfile(dir)
    else:
        jamfile_to_load = jamfile_to_load[0]

    # The module of the jamfile.
    dir = os.path.realpath(os.path.dirname(jamfile_to_load))

    jamfile_module = self.module_name (dir)

    # Initialize the jamfile module before loading.
    #
    self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))

    saved_project = self.current_project

    self.used_projects[jamfile_module] = []

    # Now load the Jamfile in it's own context.
    # Initialization might have load parent Jamfiles, which might have
    # loaded the current Jamfile with use-project. Do a final check to make
    # sure it's not loaded already.
    if not jamfile_module in self.jamfile_modules:
        self.jamfile_modules[jamfile_module] = True

        # FIXME:
        # mark-as-user $(jamfile-module) ;

        bjam.call("load", jamfile_module, jamfile_to_load)
        # Fix: removed the unused local 'basename' that merely duplicated
        # the os.path.basename() computation done above.

    # Now do some checks
    if self.current_project != saved_project:
        self.manager.errors()(
"""The value of the .current-project variable
has magically changed after loading a Jamfile.
This means some of the targets might be defined a the wrong project.
after loading %s
expected value %s
actual value %s""" % (jamfile_module, saved_project, self.current_project))

    if self.global_build_dir:
        id = self.attribute(jamfile_module, "id")
        project_root = self.attribute(jamfile_module, "project-root")
        location = self.attribute(jamfile_module, "location")

        if location and project_root == dir:
            # This is Jamroot
            if not id:
                # FIXME: go via errors module, so that contexts are
                # shown?
                print("warning: the --build-dir option was specified")
                print("warning: but Jamroot at '%s'" % dir)
                print("warning: specified no project id")
                print("warning: the --build-dir option will be ignored")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 345)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 346)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 347) def load_standalone(self, jamfile_module, file):
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 348) """Loads 'file' as standalone project that has no location
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 349) associated with it. This is mostly useful for user-config.jam,
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 350) which should be able to define targets, but although it has
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 351) some location in filesystem, we don't want any build to
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 352) happen in user's HOME, for example.
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 353)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 354) The caller is required to never call this method twice on
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 355) the same file.
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 356) """
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 357)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 358) self.initialize(jamfile_module)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 359) self.used_projects[jamfile_module] = []
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 360) bjam.call("load", jamfile_module, file)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 361) self.load_used_projects(jamfile_module)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 362)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 363) def is_jamroot(self, basename):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 364) match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 365) if match:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 366) return 1
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 367) else:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 368) return 0
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 369)
|
||||
    def initialize(self, module_name, location=None, basename=None):
        """Initialize the Jam module 'module_name' so it can act as a project.

        module-name is the name of the project module.
        location is the location (directory) of the project to initialize.
                 If not specified, a standalone project will be initialized.
        basename, when given, is used only to decide whether this project
                 is a Jamroot (see is_jamroot below).
        """

        if "--debug-loading" in self.manager.argv():
            print "Initializing project '%s'" % module_name

        # TODO: need to consider if standalone projects can do anything but defining
        # prebuilt targets. If so, we need to give more sensible "location", so that
        # source paths are correct.
        if not location:
            location = ""
        else:
            # Store the location relative to the current working directory.
            location = b2.util.path.relpath(os.getcwd(), location)

        attributes = ProjectAttributes(self.manager, location, module_name)
        self.module2attributes[module_name] = attributes

        if location:
            attributes.set("source-location", location, exact=1)
        else:
            # Standalone project: sources resolve against the empty path.
            attributes.set("source-location", "", exact=1)

        # Seed the attributes with neutral defaults; the Jamfile itself or
        # the parent project (inherited below) may override them.
        attributes.set("requirements", property_set.empty(), exact=True)
        attributes.set("usage-requirements", property_set.empty(), exact=True)
        attributes.set("default-build", [], exact=True)
        attributes.set("projects-to-build", [], exact=True)
        attributes.set("project-root", None, exact=True)
        attributes.set("build-dir", None, exact=True)

        # Bring the standard project rules into the new module.
        self.project_rules_.init_project(module_name)

        jamroot = False

        # Determine the parent module, mirroring the configuration
        # hierarchy: site-config <- user-config <- Jamroot <- Jamfiles.
        parent_module = None;
        if module_name == "site-config":
            # No parent
            pass
        elif module_name == "user-config":
            parent_module = "site-config"
        elif location and not self.is_jamroot(basename):
            # We search for parent/project-root only if jamfile was specified
            # --- i.e
            # if the project is not standalone.
            parent_module = self.load_parent(location)
        else:
            # It's either jamroot, or standalone project.
            # If it's jamroot, inherit from user-config.
            if location:
                parent_module = "user-config" ;
                jamroot = True ;

        if parent_module:
            self.inherit_attributes(module_name, parent_module)
            attributes.set("parent-module", parent_module, exact=1)

        if jamroot:
            attributes.set("project-root", location, exact=1)

        parent = None
        if parent_module:
            parent = self.target(parent_module)

        # Create the ProjectTarget for this module unless one exists already.
        if not self.module2target.has_key(module_name):
            target = b2.build.targets.ProjectTarget(self.manager,
                module_name, module_name, parent,
                self.attribute(module_name,"requirements"),
                # FIXME: why we need to pass this? It's not
                # passed in jam code.
                self.attribute(module_name, "default-build"))
            self.module2target[module_name] = target

        self.current_project = self.target(module_name)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 447)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 448) def inherit_attributes(self, project_module, parent_module):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 449) """Make 'project-module' inherit attributes of project
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 450) root and parent module."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 451)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 452) attributes = self.module2attributes[project_module]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 453) pattributes = self.module2attributes[parent_module]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 454)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 455) # Parent module might be locationless user-config.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 456) # FIXME:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 457) #if [ modules.binding $(parent-module) ]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 458) #{
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 459) # $(attributes).set parent : [ path.parent
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 460) # [ path.make [ modules.binding $(parent-module) ] ] ] ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 461) # }
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 462)
|
||||
49c03622 (jhunold 2008-07-23 09:57:41 +0000 463) attributes.set("project-root", pattributes.get("project-root"), exact=True)
|
||||
49c03622 (jhunold 2008-07-23 09:57:41 +0000 464) attributes.set("default-build", pattributes.get("default-build"), exact=True)
|
||||
49c03622 (jhunold 2008-07-23 09:57:41 +0000 465) attributes.set("requirements", pattributes.get("requirements"), exact=True)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 466) attributes.set("usage-requirements",
|
||||
cde6f09a (vladimir_prus 2007-10-19 23:12:33 +0000 467) pattributes.get("usage-requirements"), exact=1)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 468)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 469) parent_build_dir = pattributes.get("build-dir")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 470)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 471) if parent_build_dir:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 472) # Have to compute relative path from parent dir to our dir
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 473) # Convert both paths to absolute, since we cannot
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 474) # find relative path from ".." to "."
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 475)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 476) location = attributes.get("location")
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 477) parent_location = pattributes.get("location")
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 478)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 479) our_dir = os.path.join(os.getcwd(), location)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 480) parent_dir = os.path.join(os.getcwd(), parent_location)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 481)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 482) build_dir = os.path.join(parent_build_dir,
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 483) b2.util.path.relpath(parent_dir,
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 484) our_dir))
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 485)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 486) def register_id(self, id, module):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 487) """Associate the given id with the given project module."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 488) self.id2module[id] = module
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 489)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 490) def current(self):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 491) """Returns the project which is currently being loaded."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 492) return self.current_project
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 493)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 494) def push_current(self, project):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 495) """Temporary changes the current project to 'project'. Should
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 496) be followed by 'pop-current'."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 497) self.saved_current_project.append(self.current_project)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 498) self.current_project = project
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 499)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 500) def pop_current(self):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 501) self.current_project = self.saved_current_project[-1]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 502) del self.saved_current_project[-1]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 503)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 504) def attributes(self, project):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 505) """Returns the project-attribute instance for the
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 506) specified jamfile module."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 507) return self.module2attributes[project]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 508)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 509) def attribute(self, project, attribute):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 510) """Returns the value of the specified attribute in the
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 511) specified jamfile module."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 512) return self.module2attributes[project].get(attribute)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 513)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 514) def target(self, project_module):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 515) """Returns the project target corresponding to the 'project-module'."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 516) if not self.module2target[project_module]:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 517) self.module2target[project_module] = \
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 518) ProjectTarget(project_module, project_module,
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 519) self.attribute(project_module, "requirements"))
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 520)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 521) return self.module2target[project_module]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 522)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 523) def use(self, id, location):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 524) # Use/load a project.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 525) saved_project = self.current_project
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 526) project_module = self.load(location)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 527) declared_id = self.attribute(project_module, "id")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 528)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 529) if not declared_id or declared_id != id:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 530) # The project at 'location' either have no id or
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 531) # that id is not equal to the 'id' parameter.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 532) if self.id2module[id] and self.id2module[id] != project_module:
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 533) self.manager.errors()(
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 534) """Attempt to redeclare already existing project id '%s'""" % id)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 535) self.id2module[id] = project_module
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 536)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 537) self.current_module = saved_project
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 538)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 539) def add_rule(self, name, callable):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 540) """Makes rule 'name' available to all subsequently loaded Jamfiles.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 541)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 542) Calling that rule wil relay to 'callable'."""
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 543) self.project_rules_.add_rule(name, callable)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 544)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 545) def project_rules(self):
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 546) return self.project_rules_
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 547)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 548) def glob_internal(self, project, wildcards, excludes, rule_name):
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 549) location = project.get("source-location")
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 550)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 551) result = []
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 552) callable = b2.util.path.__dict__[rule_name]
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 553)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 554) paths = callable(location, wildcards, excludes)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 555) has_dir = 0
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 556) for w in wildcards:
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 557) if os.path.dirname(w):
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 558) has_dir = 1
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 559) break
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 560)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 561) if has_dir or rule_name != "glob":
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 562) # The paths we've found are relative to current directory,
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 563) # but the names specified in sources list are assumed to
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 564) # be relative to source directory of the corresponding
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 565) # prject. So, just make the name absolute.
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 566) result = [os.path.join(os.getcwd(), p) for p in paths]
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 567) else:
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 568) # There were not directory in wildcard, so the files are all
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 569) # in the source directory of the project. Just drop the
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 570) # directory, instead of making paths absolute.
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 571) result = [os.path.basename(p) for p in paths]
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 572)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 573) return result
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 574)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 575) def load_module(self, name, extra_path=None):
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 576) """Classic Boost.Build 'modules' are in fact global variables.
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 577) Therefore, try to find an already loaded Python module called 'name' in sys.modules.
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 578) If the module ist not loaded, find it Boost.Build search
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 579) path and load it. The new module is not entered in sys.modules.
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 580) The motivation here is to have disjoint namespace of modules
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 581) loaded via 'import/using' in Jamfile, and ordinary Python
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 582) modules. We don't want 'using foo' in Jamfile to load ordinary
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 583) Python module 'foo' which is going to not work. And we
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 584) also don't want 'import foo' in regular Python module to
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 585) accidentally grab module named foo that is internal to
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 586) Boost.Build and intended to provide interface to Jamfiles."""
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 587)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 588) existing = self.loaded_tool_modules_.get(name)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 589) if existing:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 590) return existing
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 591)
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 592) modules = sys.modules
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 593) for class_name in modules:
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 594) if name in class_name:
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 595) module = modules[class_name]
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 596) self.loaded_tool_modules_[name] = module
|
||||
53b0faa2 (jhunold 2008-08-10 18:25:50 +0000 597) return module
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 598)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 599) path = extra_path
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 600) if not path:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 601) path = []
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 602) path.extend(self.manager.b2.path())
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 603) location = None
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 604) for p in path:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 605) l = os.path.join(p, name + ".py")
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 606) if os.path.exists(l):
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 607) location = l
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 608) break
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 609)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 610) if not location:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 611) self.manager.errors()("Cannot find module '%s'" % name)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 612)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 613) mname = "__build_build_temporary__"
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 614) file = open(location)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 615) try:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 616) # TODO: this means we'll never make use of .pyc module,
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 617) # which might be a problem, or not.
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 618) module = imp.load_module(mname, file, os.path.basename(location),
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 619) (".py", "r", imp.PY_SOURCE))
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 620) del sys.modules[mname]
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 621) self.loaded_tool_modules_[name] = module
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 622) return module
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 623) finally:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 624) file.close()
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 625)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 626)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 627)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 628) # FIXME:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 629) # Defines a Boost.Build extension project. Such extensions usually
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 630) # contain library targets and features that can be used by many people.
|
||||
# Even though extensions are really projects, they can be initialized as
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 632) # a module would be with the "using" (project.project-rules.using)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 633) # mechanism.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 634) #rule extension ( id : options * : * )
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 635) #{
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 636) # # The caller is a standalone module for the extension.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 637) # local mod = [ CALLER_MODULE ] ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 638) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 639) # # We need to do the rest within the extension module.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 640) # module $(mod)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 641) # {
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 642) # import path ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 643) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 644) # # Find the root project.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 645) # local root-project = [ project.current ] ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 646) # root-project = [ $(root-project).project-module ] ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 647) # while
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 648) # [ project.attribute $(root-project) parent-module ] &&
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 649) # [ project.attribute $(root-project) parent-module ] != user-config
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 650) # {
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 651) # root-project = [ project.attribute $(root-project) parent-module ] ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 652) # }
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 653) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 654) # # Create the project data, and bring in the project rules
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 655) # # into the module.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 656) # project.initialize $(__name__) :
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 657) # [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 658) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 659) # # Create the project itself, i.e. the attributes.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 660) # # All extensions are created in the "/ext" project space.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 661) # project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 662) # local attributes = [ project.attributes $(__name__) ] ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 663) #
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 664) # # Inherit from the root project of whomever is defining us.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 665) # project.inherit-attributes $(__name__) : $(root-project) ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 666) # $(attributes).set parent-module : $(root-project) : exact ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 667) # }
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 668) #}
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 669)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 670)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 671) class ProjectAttributes:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 672) """Class keeping all the attributes of a project.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 673)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 674) The standard attributes are 'id', "location", "project-root", "parent"
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 675) "requirements", "default-build", "source-location" and "projects-to-build".
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 676) """
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 677)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 678) def __init__(self, manager, location, project_module):
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 679) self.manager = manager
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 680) self.location = location
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 681) self.project_module = project_module
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 682) self.attributes = {}
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 683) self.usage_requirements = None
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 684)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 685) def set(self, attribute, specification, exact):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 686) """Set the named attribute from the specification given by the user.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 687) The value actually set may be different."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 688)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 689) if exact:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 690) self.__dict__[attribute] = specification
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 691)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 692) elif attribute == "requirements":
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 693) self.requirements = property_set.refine_from_user_input(
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 694) self.requirements, specification,
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 695) self.project_module, self.location)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 696)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 697) elif attribute == "usage-requirements":
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 698) unconditional = []
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 699) for p in specification:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 700) split = property.split_conditional(p)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 701) if split:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 702) unconditional.append(split[1])
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 703) else:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 704) unconditional.append(p)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 705)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 706) non_free = property.remove("free", unconditional)
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 707) if non_free:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 708) pass
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 709) # FIXME:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 710) #errors.error "usage-requirements" $(specification) "have non-free properties" $(non-free) ;
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 711)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 712) t = property.translate_paths(specification, self.location)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 713)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 714) existing = self.__dict__.get("usage-requirements")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 715) if existing:
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 716) new = property_set.create(existing.raw() + t)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 717) else:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 718) new = property_set.create(t)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 719) self.__dict__["usage-requirements"] = new
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 720)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 721)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 722) elif attribute == "default-build":
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 723) self.__dict__["default-build"] = property_set.create(specification)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 724)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 725) elif attribute == "source-location":
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 726) source_location = []
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 727) for path in specification:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 728) source_location += os.path.join(self.location, path)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 729) self.__dict__["source-location"] = source_location
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 730)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 731) elif attribute == "build-dir":
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 732) self.__dict__["build-dir"] = os.path.join(self.location, specification)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 733)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 734) elif not attribute in ["id", "default-build", "location",
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 735) "source-location", "parent",
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 736) "projects-to-build", "project-root"]:
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 737) self.manager.errors()(
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 738) """Invalid project attribute '%s' specified
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 739) for project at '%s'""" % (attribute, self.location))
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 740) else:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 741) self.__dict__[attribute] = specification
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 742)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 743) def get(self, attribute):
|
||||
cde6f09a (vladimir_prus 2007-10-19 23:12:33 +0000 744) return self.__dict__[attribute]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 745)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 746) def dump(self):
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 747) """Prints the project attributes."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 748) id = self.get("id")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 749) if not id:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 750) id = "(none)"
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 751) else:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 752) id = id[0]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 753)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 754) parent = self.get("parent")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 755) if not parent:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 756) parent = "(none)"
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 757) else:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 758) parent = parent[0]
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 759)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 760) print "'%s'" % id
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 761) print "Parent project:%s", parent
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 762) print "Requirements:%s", self.get("requirements")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 763) print "Default build:%s", string.join(self.get("debuild-build"))
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 764) print "Source location:%s", string.join(self.get("source-location"))
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 765) print "Projects to build:%s", string.join(self.get("projects-to-build").sort());
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 766)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 767) class ProjectRules:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 768) """Class keeping all rules that are made available to Jamfile."""
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 769)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 770) def __init__(self, registry):
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 771) self.registry = registry
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 772) self.manager_ = registry.manager
|
||||
38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 773) self.rules = {}
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 774) self.local_names = [x for x in self.__class__.__dict__
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 775) if x not in ["__init__", "init_project", "add_rule",
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 776) "error_reporting_wrapper", "add_rule_for_type"]]
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 777) self.all_names_ = [x for x in self.local_names]
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 778)
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 779) def add_rule_for_type(self, type):
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 780) rule_name = type.lower();
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 781)
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 782) def xpto (name, sources, requirements = [], default_build = None, usage_requirements = []):
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 783) return self.manager_.targets().create_typed_target(
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 784) type, self.registry.current(), name[0], sources,
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 785) requirements, default_build, usage_requirements)
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 786)
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 787) self.add_rule(type.lower(), xpto)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 788)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 789) def add_rule(self, name, callable):
|
||||
38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 790) self.rules[name] = callable
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 791) self.all_names_.append(name)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 792)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 793) def all_names(self):
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 794) return self.all_names_
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 795)
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 796) def call_and_report_errors(self, callable, *args):
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 797) result = None
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 798) try:
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 799) self.manager_.errors().push_jamfile_context()
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 800) result = callable(*args)
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 801) except ExceptionWithUserContext, e:
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 802) e.report()
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 803) except Exception, e:
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 804) try:
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 805) self.manager_.errors().handle_stray_exception (e)
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 806) except ExceptionWithUserContext, e:
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 807) e.report()
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 808) finally:
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 809) self.manager_.errors().pop_jamfile_context()
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 810)
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 811) return result
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 812)
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 813) def make_wrapper(self, callable):
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 814) """Given a free-standing function 'callable', return a new
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 815) callable that will call 'callable' and report all exceptins,
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 816) using 'call_and_report_errors'."""
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 817) def wrapper(*args):
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 818) self.call_and_report_errors(callable, *args)
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 819) return wrapper
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 820)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 821) def init_project(self, project_module):
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 822)
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 823) for n in self.local_names:
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 824) # Using 'getattr' here gives us a bound method,
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 825) # while using self.__dict__[r] would give unbound one.
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 826) v = getattr(self, n)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 827) if callable(v):
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 828) if n == "import_":
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 829) n = "import"
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 830) else:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 831) n = string.replace(n, "_", "-")
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 832)
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 833) bjam.import_rule(project_module, n,
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 834) self.make_wrapper(v))
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 835)
|
||||
38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 836) for n in self.rules:
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 837) bjam.import_rule(project_module, n,
|
||||
0317671e (vladimir_prus 2007-10-28 14:02:06 +0000 838) self.make_wrapper(self.rules[n]))
|
||||
38d984eb (vladimir_prus 2007-10-13 17:52:25 +0000 839)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 840) def project(self, *args):
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 841)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 842) jamfile_module = self.registry.current().project_module()
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 843) attributes = self.registry.attributes(jamfile_module)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 844)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 845) id = None
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 846) if args and args[0]:
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 847) id = args[0][0]
|
||||
092119e3 (vladimir_prus 2007-10-16 05:45:31 +0000 848) args = args[1:]
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 849)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 850) if id:
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 851) if id[0] != '/':
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 852) id = '/' + id
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 853) self.registry.register_id (id, jamfile_module)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 854)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 855) explicit_build_dir = None
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 856) for a in args:
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 857) if a:
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 858) attributes.set(a[0], a[1:], exact=0)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 859) if a[0] == "build-dir":
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 860) explicit_build_dir = a[1]
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 861)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 862) # If '--build-dir' is specified, change the build dir for the project.
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 863) if self.registry.global_build_dir:
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 864)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 865) location = attributes.get("location")
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 866) # Project with empty location is 'standalone' project, like
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 867) # user-config, or qt. It has no build dir.
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 868) # If we try to set build dir for user-config, we'll then
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 869) # try to inherit it, with either weird, or wrong consequences.
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 870) if location and location == attributes.get("project-root"):
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 871) # This is Jamroot.
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 872) if id:
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 873) if explicit_build_dir and os.path.isabs(explicit_build_dir):
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 874) self.register.manager.errors()(
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 875) """Absolute directory specified via 'build-dir' project attribute
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 876) Don't know how to combine that with the --build-dir option.""")
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 877)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 878) rid = id
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 879) if rid[0] == '/':
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 880) rid = rid[1:]
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 881)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 882) p = os.path.join(self.registry.global_build_dir,
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 883) rid, explicit_build_dir)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 884) attributes.set("build-dir", p, exact=1)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 885) elif explicit_build_dir:
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 886) self.registry.manager.errors()(
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 887) """When --build-dir is specified, the 'build-project'
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 888) attribute is allowed only for top-level 'project' invocations""")
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 889)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 890) def constant(self, name, value):
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 891) """Declare and set a project global constant.
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 892) Project global constants are normal variables but should
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 893) not be changed. They are applied to every child Jamfile."""
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 894) m = "Jamfile</home/ghost/Work/Boost/boost-svn/tools/build/v2_python/python/tests/bjam/make>"
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 895) self.registry.current().add_constant(name[0], value)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 896)
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 897) def path_constant(self, name, value):
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 898) """Declare and set a project global constant, whose value is a path. The
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 899) path is adjusted to be relative to the invocation directory. The given
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 900) value path is taken to be either absolute, or relative to this project
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 901) root."""
|
||||
0ed8e16d (vladimir_prus 2007-10-13 21:34:05 +0000 902) self.registry.current().add_constant(name[0], value, path=1)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 903)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 904) def use_project(self, id, where):
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 905) # See comment in 'load' for explanation why we record the
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 906) # parameters as opposed to loading the project now.
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 907) m = self.registry.current().project_module();
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 908) self.registry.used_projects[m].append((id, where))
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 909)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 910) def build_project(self, dir):
|
||||
1674e2d9 (jhunold 2008-08-08 19:52:05 +0000 911) assert(isinstance(dir, list))
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 912) jamfile_module = self.registry.current().project_module()
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 913) attributes = self.registry.attributes(jamfile_module)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 914) now = attributes.get("projects-to-build")
|
||||
1674e2d9 (jhunold 2008-08-08 19:52:05 +0000 915) attributes.set("projects-to-build", now + dir, exact=True)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 916)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 917) def explicit(self, target_names):
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 918) t = self.registry.current()
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 919) for n in target_names:
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 920) t.mark_target_as_explicit(n)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 921)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 922) def glob(self, wildcards, excludes=None):
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 923) return self.registry.glob_internal(self.registry.current(),
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 924) wildcards, excludes, "glob")
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 925)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 926) def glob_tree(self, wildcards, excludes=None):
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 927) bad = 0
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 928) for p in wildcards:
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 929) if os.path.dirname(p):
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 930) bad = 1
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 931)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 932) if excludes:
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 933) for p in excludes:
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 934) if os.path.dirname(p):
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 935) bad = 1
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 936)
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 937) if bad:
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 938) self.registry.manager().errors()(
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 939) "The patterns to 'glob-tree' may not include directory")
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 940) return self.registry.glob_internal(self.registry.current(),
|
||||
2a36874b (vladimir_prus 2007-10-14 07:20:55 +0000 941) wildcards, excludes, "glob_tree")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 942)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 943)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 944) def using(self, toolset, *args):
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 945) # The module referred by 'using' can be placed in
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 946) # the same directory as Jamfile, and the user
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 947) # will expect the module to be found even though
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 948) # the directory is not in BOOST_BUILD_PATH.
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 949) # So temporary change the search path.
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 950) jamfile_module = self.registry.current().project_module()
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 951) attributes = self.registry.attributes(jamfile_module)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 952) location = attributes.get("location")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 953)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 954) m = self.registry.load_module(toolset[0], [location])
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 955) if not m.__dict__.has_key("init"):
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 956) self.registry.manager.errors()(
|
||||
7da7f9c1 (vladimir_prus 2008-05-18 04:29:53 +0000 957) "Tool module '%s' does not define the 'init' method" % toolset[0])
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 958) m.init(*args)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 959)
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 960)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 961) def import_(self, name, names_to_import=None, local_names=None):
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 962)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 963) name = name[0]
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 964) jamfile_module = self.registry.current().project_module()
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 965) attributes = self.registry.attributes(jamfile_module)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 966) location = attributes.get("location")
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 967)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 968) m = self.registry.load_module(name, [location])
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 969)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 970) for f in m.__dict__:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 971) v = m.__dict__[f]
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 972) if callable(v):
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 973) bjam.import_rule(jamfile_module, name + "." + f, v)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 974)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 975) if names_to_import:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 976) if not local_names:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 977) local_names = names_to_import
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 978)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 979) if len(names_to_import) != len(local_names):
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 980) self.registry.manager.errors()(
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 981) """The number of names to import and local names do not match.""")
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 982)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 983) for n, l in zip(names_to_import, local_names):
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 984) bjam.import_rule(jamfile_module, l, m.__dict__[n])
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 985)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 986) def conditional(self, condition, requirements):
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 987) """Calculates conditional requirements for multiple requirements
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 988) at once. This is a shorthand to be reduce duplication and to
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 989) keep an inline declarative syntax. For example:
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 990)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 991) lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 992) <define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 993) """
|
||||
f049766b (vladimir_prus 2007-10-10 09:31:06 +0000 994)
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 995) c = string.join(condition, ",")
|
||||
f2aef897 (vladimir_prus 2007-10-14 09:19:52 +0000 996) return [c + ":" + r for r in requirements]
|
||||
996
src/build/project.py
Normal file
996
src/build/project.py
Normal file
@@ -0,0 +1,996 @@
|
||||
# Status: being ported by Vladimir Prus
|
||||
# Base revision: 40480
|
||||
|
||||
# Copyright 2002, 2003 Dave Abrahams
|
||||
# Copyright 2002, 2005, 2006 Rene Rivera
|
||||
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# Implements project representation and loading.
|
||||
# Each project is represented by
|
||||
# - a module where all the Jamfile content live.
|
||||
# - an instance of 'project-attributes' class.
|
||||
# (given module name, can be obtained by 'attributes' rule)
|
||||
# - an instance of 'project-target' class (from targets.jam)
|
||||
# (given a module name, can be obtained by 'target' rule)
|
||||
#
|
||||
# Typically, projects are created as result of loading Jamfile, which is
|
||||
# done by rules 'load' and 'initialize', below. First, a module for the Jamfile
|
||||
# is loaded and new project-attributes instance is created. Some rules
|
||||
# necessary for project are added to the module (see 'project-rules' module)
|
||||
# at the bottom of this file.
|
||||
# Default project attributes are set (inheriting attributes of parent project, if
|
||||
# it exists). After that, Jamfile is read. It can declare its own attributes,
|
||||
# via 'project' rule, which will be combined with already set attributes.
|
||||
#
|
||||
#
|
||||
# The 'project' rule can also declare project id, which will be associated with
|
||||
# the project module.
|
||||
#
|
||||
# There can also be 'standalone' projects. They are created by calling 'initialize'
|
||||
# on arbitrary module, and not specifying location. After the call, the module can
|
||||
# call 'project' rule, declare main target and behave as regular projects. However,
|
||||
# since it's not associated with any location, it's better to declare only prebuilt
|
||||
# targets.
|
||||
#
|
||||
# The list of all loaded Jamfiles is stored in the variable .project-locations. It's possible
|
||||
# to obtain module name for a location using 'module-name' rule. The standalone projects
|
||||
# are not recorded, the only way to use them is by project id.
|
||||
|
||||
import b2.util.path
|
||||
from b2.build import property_set, property
|
||||
from b2.build.errors import ExceptionWithUserContext
|
||||
import b2.build.targets
|
||||
|
||||
import bjam
|
||||
|
||||
import re
|
||||
import sys
|
||||
import os
|
||||
import string
|
||||
import imp
|
||||
import traceback
|
||||
|
||||
class ProjectRegistry:
    """Registry of all projects (Jamfile modules) known to the build system.

    Tracks the mapping between filesystem locations, project ids and the
    bjam modules that represent them, and owns the shared ProjectRules
    instance exposed to every Jamfile."""

    def __init__(self, manager, global_build_dir):
        """manager -- the build Manager instance.
        global_build_dir -- value of the --build-dir option, or None."""
        self.manager = manager
        # Bug fix: the original assigned None here, discarding the
        # constructor argument and thereby disabling --build-dir handling.
        self.global_build_dir = global_build_dir
        self.project_rules_ = ProjectRules(self)

        # The target corresponding to the project being loaded now
        self.current_project = None

        # The set of names of loaded project modules
        self.jamfile_modules = {}

        # Mapping from location to module name
        self.location2module = {}

        # Mapping from project id to project module
        self.id2module = {}

        # Map from Jamfile directory to parent Jamfile/Jamroot
        # location.
        self.dir2parent_jamfile = {}

        # Map from directory to the name of Jamfile in
        # that directory (or None).
        self.dir2jamfile = {}

        # Map from project module to attributes object.
        self.module2attributes = {}

        # Map from project module to target for the project
        self.module2target = {}

        # Map from names to Python modules, for modules loaded
        # via 'using' and 'import' rules in Jamfiles.
        self.loaded_tool_modules_ = {}

        # Map from project target to the list of
        # (id,location) pairs corresponding to all 'use-project'
        # invocations.
        # TODO: should not have a global map, keep this
        # in ProjectTarget.
        self.used_projects = {}

        # Stack used by push_current/pop_current.
        self.saved_current_project = []

        self.JAMROOT = self.manager.getenv("JAMROOT")

        # Note the use of character groups, as opposed to listing
        # 'Jamroot' and 'jamroot'. With the latter, we'd get duplicate
        # matches on windows and would have to eliminate duplicates.
        if not self.JAMROOT:
            self.JAMROOT = ["project-root.jam", "[Jj]amroot", "[Jj]amroot.jam"]

        # Default patterns to search for the Jamfiles to use for build
        # declarations.
        self.JAMFILE = self.manager.getenv("JAMFILE")

        if not self.JAMFILE:
            self.JAMFILE = ["[Bb]uild.jam", "[Jj]amfile.v2", "[Jj]amfile",
                            "[Jj]amfile.jam"]
    def load (self, jamfile_location):
        """Loads jamfile at the given location. After loading, project global
        file and jamfile needed by the loaded one will be loaded recursively.
        If the jamfile at that location is loaded already, does nothing.
        Returns the project module for the Jamfile."""

        # Normalize: make the location absolute, then re-express it relative
        # to the current directory so equal locations always compare equal.
        absolute = os.path.join(os.getcwd(), jamfile_location)
        absolute = os.path.normpath(absolute)
        jamfile_location = b2.util.path.relpath(os.getcwd(), absolute)

        if "--debug-loading" in self.manager.argv():
            print "Loading Jamfile at '%s'" % jamfile_location


        mname = self.module_name(jamfile_location)
        # If Jamfile is already loaded, don't try again.
        if not mname in self.jamfile_modules:

            self.load_jamfile(jamfile_location)

            # We want to make sure that child project are loaded only
            # after parent projects. In particular, because parent projects
            # define attributes which are inherited by children, and we don't
            # want children to be loaded before parents has defined everything.
            #
            # While "build-project" and "use-project" can potentially refer
            # to child projects from parent projects, we don't immediately
            # load child projects when seeing those attributes. Instead,
            # we record the minimal information that will be used only later.

            self.load_used_projects(mname)

        return mname
||||
def load_used_projects(self, module_name):
|
||||
# local used = [ modules.peek $(module-name) : .used-projects ] ;
|
||||
used = self.used_projects[module_name]
|
||||
|
||||
location = self.attribute(module_name, "location")
|
||||
for u in used:
|
||||
id = u[0]
|
||||
where = u[1]
|
||||
|
||||
self.use(id, os.path.join(location, where))
|
||||
|
||||
    def load_parent(self, location):
        """Loads parent of Jamfile at 'location'.
        Issues an error if nothing is found."""

        # Search upwards for either a Jamroot or a plain Jamfile.
        found = b2.util.path.glob_in_parents(
            location, self.JAMROOT + self.JAMFILE)

        if not found:
            print "error: Could not find parent for project at '%s'" % location
            print "error: Did not find Jamfile or project-root.jam in any parent directory."
            sys.exit(1)

        return self.load(os.path.dirname(found[0]))
def act_as_jamfile(self, module, location):
|
||||
"""Makes the specified 'module' act as if it were a regularly loaded Jamfile
|
||||
at 'location'. If Jamfile is already located for that location, it's an
|
||||
error."""
|
||||
|
||||
if self.module_name(location) in self.jamfile_modules:
|
||||
self.manager.errors()(
|
||||
"Jamfile was already loaded for '%s'" % location)
|
||||
|
||||
# Set up non-default mapping from location to module.
|
||||
self.location2module[location] = module
|
||||
|
||||
# Add the location to the list of project locations
|
||||
# so that we don't try to load Jamfile in future
|
||||
self.jamfile_modules.append(location)
|
||||
|
||||
self.initialize(module, location)
|
||||
|
||||
    def find(self, name, current_location):
        """Given 'name' which can be project-id or plain directory name,
        return project module corresponding to that id or directory.
        Returns nothing of project is not found."""

        project_module = None

        # Try interpreting name as project id.
        if name[0] == '/':
            project_module = self.id2module.get(name)

        if not project_module:
            # Otherwise treat the name as a directory relative to the
            # current project's location.
            location = os.path.join(current_location, name)
            # If no project is registered for the given location, try to
            # load it. First see if we have Jamfile. If not we might have project
            # root, willing to act as Jamfile. In that case, project-root
            # must be placed in the directory referred by id.

            project_module = self.module_name(location)
            if not project_module in self.jamfile_modules and \
                   b2.util.path.glob([location], self.JAMROOT + self.JAMFILE):
                project_module = self.load(location)

        return project_module
    def module_name(self, jamfile_location):
        """Returns the name of module corresponding to 'jamfile-location'.
        If no module corresponds to location yet, associates default
        module name with that location."""
        module = self.location2module.get(jamfile_location)
        if not module:
            # Root the path, so that locations are always unambiguous.
            # Without this, we can't decide if '../../exe/program1' and '.'
            # are the same paths, or not.
            jamfile_location = os.path.realpath(
                os.path.join(os.getcwd(), jamfile_location))
            module = "Jamfile<%s>" % jamfile_location
            self.location2module[jamfile_location] = module
        return module
    def find_jamfile (self, dir, parent_root=0, no_errors=0):
        """Find the Jamfile at the given location. This returns the
        exact names of all the Jamfiles in the given directory. The optional
        parent-root argument causes this to search not the given directory
        but the ones above it up to the directory given in it."""

        # Glob for all the possible Jamfiles according to the match pattern.
        #
        jamfile_glob = None
        if parent_root:
            # Search parent directories; cache the result per directory.
            parent = self.dir2parent_jamfile.get(dir)
            if not parent:
                parent = b2.util.path.glob_in_parents(dir,
                                                      self.JAMFILE)
                self.dir2parent_jamfile[dir] = parent
            jamfile_glob = parent
        else:
            # Search 'dir' itself; again cached.
            jamfile = self.dir2jamfile.get(dir)
            if not jamfile:
                jamfile = b2.util.path.glob([dir], self.JAMFILE)
                self.dir2jamfile[dir] = jamfile
            jamfile_glob = jamfile

        # NOTE(review): this branch runs for a single match as well; a lone
        # non-v2 Jamfile therefore also triggers the warning below — in the
        # Jam original the warning fired only for two or more matches.
        # Confirm whether that is intended.
        if len(jamfile_glob):
            # Multiple Jamfiles found in the same place. Warn about this.
            # And ensure we use only one of them.
            # As a temporary convenience measure, if there's Jamfile.v2 amount
            # found files, suppress the warning and use it.
            #
            pattern = "(.*[Jj]amfile\\.v2)|(.*[Bb]uild\\.jam)"
            v2_jamfiles = [x for x in jamfile_glob if re.match(pattern, x)]
            if len(v2_jamfiles) == 1:
                jamfile_glob = v2_jamfiles
            else:
                print """warning: Found multiple Jamfiles at '%s'!
Loading the first one: '%s'.""" % (dir, jamfile_glob[0])

        # Could not find it, error.
        if not no_errors and not jamfile_glob:
            self.manager.errors()(
                """Unable to load Jamfile.
Could not find a Jamfile in directory '%s'
Attempted to find it with pattern '%s'.
Please consult the documentation at 'http://boost.org/boost-build2'."""
                % (dir, string.join(self.JAMFILE)))

        return jamfile_glob[0]
    def load_jamfile(self, dir):
        """Load a Jamfile at the given directory. Returns nothing.
        Will attempt to load the file as indicated by the JAMFILE patterns.
        Effect of calling this rule twice with the same 'dir' is undefined."""

        # See if the Jamfile is where it should be.
        jamfile_to_load = b2.util.path.glob([dir], self.JAMROOT)
        if not jamfile_to_load:
            jamfile_to_load = self.find_jamfile(dir)
        else:
            jamfile_to_load = jamfile_to_load[0]

        # The module of the jamfile.
        dir = os.path.realpath(os.path.dirname(jamfile_to_load))

        jamfile_module = self.module_name (dir)

        # Initialize the jamfile module before loading.
        #
        self.initialize(jamfile_module, dir, os.path.basename(jamfile_to_load))

        saved_project = self.current_project

        self.used_projects[jamfile_module] = []

        # Now load the Jamfile in it's own context.
        # Initialization might have load parent Jamfiles, which might have
        # loaded the current Jamfile with use-project. Do a final check to make
        # sure it's not loaded already.
        if not jamfile_module in self.jamfile_modules:
            self.jamfile_modules[jamfile_module] = True

            # FIXME:
            # mark-as-user $(jamfile-module) ;

            bjam.call("load", jamfile_module, jamfile_to_load)
            basename = os.path.basename(jamfile_to_load)

        # Now do some checks
        if self.current_project != saved_project:
            self.manager.errors()(
"""The value of the .current-project variable
has magically changed after loading a Jamfile.
This means some of the targets might be defined a the wrong project.
after loading %s
expected value %s
actual value %s""" % (jamfile_module, saved_project, self.current_project))

        # Warn when --build-dir was given but this Jamroot declared no
        # project id (the id is needed to name the build subdirectory).
        if self.global_build_dir:
            id = self.attribute(jamfile_module, "id")
            project_root = self.attribute(jamfile_module, "project-root")
            location = self.attribute(jamfile_module, "location")

            if location and project_root == dir:
                # This is Jamroot
                if not id:
                    # FIXME: go via errors module, so that contexts are
                    # shown?
                    print "warning: the --build-dir option was specified"
                    print "warning: but Jamroot at '%s'" % dir
                    print "warning: specified no project id"
                    print "warning: the --build-dir option will be ignored"
    def load_standalone(self, jamfile_module, file):
        """Loads 'file' as standalone project that has no location
        associated with it. This is mostly useful for user-config.jam,
        which should be able to define targets, but although it has
        some location in filesystem, we don't want any build to
        happen in user's HOME, for example.

        The caller is required to never call this method twice on
        the same file.
        """

        # No location is passed, so 'initialize' sets up a standalone project.
        self.initialize(jamfile_module)
        self.used_projects[jamfile_module] = []
        bjam.call("load", jamfile_module, file)
        self.load_used_projects(jamfile_module)
def is_jamroot(self, basename):
|
||||
match = [ pat for pat in self.JAMROOT if re.match(pat, basename)]
|
||||
if match:
|
||||
return 1
|
||||
else:
|
||||
return 0
|
||||
|
||||
    def initialize(self, module_name, location=None, basename=None):
        """Initialize the module for a project.

        module-name is the name of the project module.
        location is the location (directory) of the project to initialize.
                 If not specified, standalone project will be initialized
        basename is the filename of the Jamfile, used to detect Jamroot.
        """

        if "--debug-loading" in self.manager.argv():
            print "Initializing project '%s'" % module_name

        # TODO: need to consider if standalone projects can do anything but defining
        # prebuilt targets. If so, we need to give more sensible "location", so that
        # source paths are correct.
        if not location:
            location = ""
        else:
            location = b2.util.path.relpath(os.getcwd(), location)

        attributes = ProjectAttributes(self.manager, location, module_name)
        self.module2attributes[module_name] = attributes

        if location:
            attributes.set("source-location", location, exact=1)
        else:
            attributes.set("source-location", "", exact=1)

        # Seed the standard attributes with defaults.
        attributes.set("requirements", property_set.empty(), exact=True)
        attributes.set("usage-requirements", property_set.empty(), exact=True)
        attributes.set("default-build", [], exact=True)
        attributes.set("projects-to-build", [], exact=True)
        attributes.set("project-root", None, exact=True)
        attributes.set("build-dir", None, exact=True)

        # Make the Jamfile rules callable from the project module.
        self.project_rules_.init_project(module_name)

        jamroot = False

        # Determine the parent project, if any.
        parent_module = None;
        if module_name == "site-config":
            # No parent
            pass
        elif module_name == "user-config":
            parent_module = "site-config"
        elif location and not self.is_jamroot(basename):
            # We search for parent/project-root only if jamfile was specified
            # --- i.e
            # if the project is not standalone.
            parent_module = self.load_parent(location)
        else:
            # It's either jamroot, or standalone project.
            # If it's jamroot, inherit from user-config.
            if location:
                parent_module = "user-config" ;
                jamroot = True ;

        if parent_module:
            self.inherit_attributes(module_name, parent_module)
            attributes.set("parent-module", parent_module, exact=1)

        if jamroot:
            attributes.set("project-root", location, exact=1)

        parent = None
        if parent_module:
            parent = self.target(parent_module)

        # Lazily create the project target for this module.
        if not self.module2target.has_key(module_name):
            target = b2.build.targets.ProjectTarget(self.manager,
                module_name, module_name, parent,
                self.attribute(module_name,"requirements"),
                # FIXME: why we need to pass this? It's not
                # passed in jam code.
                self.attribute(module_name, "default-build"))
            self.module2target[module_name] = target

        # The project being initialized becomes the current project.
        self.current_project = self.target(module_name)
def inherit_attributes(self, project_module, parent_module):
|
||||
"""Make 'project-module' inherit attributes of project
|
||||
root and parent module."""
|
||||
|
||||
attributes = self.module2attributes[project_module]
|
||||
pattributes = self.module2attributes[parent_module]
|
||||
|
||||
# Parent module might be locationless user-config.
|
||||
# FIXME:
|
||||
#if [ modules.binding $(parent-module) ]
|
||||
#{
|
||||
# $(attributes).set parent : [ path.parent
|
||||
# [ path.make [ modules.binding $(parent-module) ] ] ] ;
|
||||
# }
|
||||
|
||||
attributes.set("project-root", pattributes.get("project-root"), exact=True)
|
||||
attributes.set("default-build", pattributes.get("default-build"), exact=True)
|
||||
attributes.set("requirements", pattributes.get("requirements"), exact=True)
|
||||
attributes.set("usage-requirements",
|
||||
pattributes.get("usage-requirements"), exact=1)
|
||||
|
||||
parent_build_dir = pattributes.get("build-dir")
|
||||
|
||||
if parent_build_dir:
|
||||
# Have to compute relative path from parent dir to our dir
|
||||
# Convert both paths to absolute, since we cannot
|
||||
# find relative path from ".." to "."
|
||||
|
||||
location = attributes.get("location")
|
||||
parent_location = pattributes.get("location")
|
||||
|
||||
our_dir = os.path.join(os.getcwd(), location)
|
||||
parent_dir = os.path.join(os.getcwd(), parent_location)
|
||||
|
||||
build_dir = os.path.join(parent_build_dir,
|
||||
b2.util.path.relpath(parent_dir,
|
||||
our_dir))
|
||||
|
||||
    def register_id(self, id, module):
        """Associate the given id with the given project module."""
        self.id2module[id] = module
    def current(self):
        """Returns the project which is currently being loaded."""
        return self.current_project
    def push_current(self, project):
        """Temporary changes the current project to 'project'. Should
        be followed by 'pop-current'."""
        # Save the old value on a stack so nested pushes nest correctly.
        self.saved_current_project.append(self.current_project)
        self.current_project = project
def pop_current(self):
|
||||
self.current_project = self.saved_current_project[-1]
|
||||
del self.saved_current_project[-1]
|
||||
|
||||
    def attributes(self, project):
        """Returns the project-attribute instance for the
        specified jamfile module."""
        return self.module2attributes[project]
    def attribute(self, project, attribute):
        """Returns the value of the specified attribute in the
        specified jamfile module."""
        return self.module2attributes[project].get(attribute)
def target(self, project_module):
|
||||
"""Returns the project target corresponding to the 'project-module'."""
|
||||
if not self.module2target[project_module]:
|
||||
self.module2target[project_module] = \
|
||||
ProjectTarget(project_module, project_module,
|
||||
self.attribute(project_module, "requirements"))
|
||||
|
||||
return self.module2target[project_module]
|
||||
|
||||
def use(self, id, location):
|
||||
# Use/load a project.
|
||||
saved_project = self.current_project
|
||||
project_module = self.load(location)
|
||||
declared_id = self.attribute(project_module, "id")
|
||||
|
||||
if not declared_id or declared_id != id:
|
||||
# The project at 'location' either have no id or
|
||||
# that id is not equal to the 'id' parameter.
|
||||
if self.id2module[id] and self.id2module[id] != project_module:
|
||||
self.manager.errors()(
|
||||
"""Attempt to redeclare already existing project id '%s'""" % id)
|
||||
self.id2module[id] = project_module
|
||||
|
||||
self.current_module = saved_project
|
||||
|
||||
    def add_rule(self, name, callable):
        """Makes rule 'name' available to all subsequently loaded Jamfiles.

        Calling that rule will relay to 'callable'."""
        self.project_rules_.add_rule(name, callable)
    def project_rules(self):
        """Returns the ProjectRules instance shared by all projects."""
        return self.project_rules_
    def glob_internal(self, project, wildcards, excludes, rule_name):
        """Shared implementation of the 'glob' and 'glob-tree' Jamfile
        rules; 'rule_name' names the b2.util.path function to use."""
        location = project.get("source-location")

        result = []
        # Select the globbing function ('glob' or 'glob_tree') by name.
        callable = b2.util.path.__dict__[rule_name]

        paths = callable(location, wildcards, excludes)
        has_dir = 0
        for w in wildcards:
            if os.path.dirname(w):
                has_dir = 1
                break

        if has_dir or rule_name != "glob":
            # The paths we've found are relative to current directory,
            # but the names specified in sources list are assumed to
            # be relative to source directory of the corresponding
            # project. So, just make the name absolute.
            result = [os.path.join(os.getcwd(), p) for p in paths]
        else:
            # There were no directory in wildcard, so the files are all
            # in the source directory of the project. Just drop the
            # directory, instead of making paths absolute.
            result = [os.path.basename(p) for p in paths]

        return result
def load_module(self, name, extra_path=None):
|
||||
"""Classic Boost.Build 'modules' are in fact global variables.
|
||||
Therefore, try to find an already loaded Python module called 'name' in sys.modules.
|
||||
If the module ist not loaded, find it Boost.Build search
|
||||
path and load it. The new module is not entered in sys.modules.
|
||||
The motivation here is to have disjoint namespace of modules
|
||||
loaded via 'import/using' in Jamfile, and ordinary Python
|
||||
modules. We don't want 'using foo' in Jamfile to load ordinary
|
||||
Python module 'foo' which is going to not work. And we
|
||||
also don't want 'import foo' in regular Python module to
|
||||
accidentally grab module named foo that is internal to
|
||||
Boost.Build and intended to provide interface to Jamfiles."""
|
||||
|
||||
existing = self.loaded_tool_modules_.get(name)
|
||||
if existing:
|
||||
return existing
|
||||
|
||||
modules = sys.modules
|
||||
for class_name in modules:
|
||||
if name is class_name:
|
||||
module = modules[class_name]
|
||||
self.loaded_tool_modules_[name] = module
|
||||
return module
|
||||
|
||||
path = extra_path
|
||||
if not path:
|
||||
path = []
|
||||
path.extend(self.manager.boost_build_path())
|
||||
location = None
|
||||
for p in path:
|
||||
l = os.path.join(p, name + ".py")
|
||||
if os.path.exists(l):
|
||||
location = l
|
||||
break
|
||||
|
||||
if not location:
|
||||
self.manager.errors()("Cannot find module '%s'" % name)
|
||||
|
||||
mname = "__build_build_temporary__"
|
||||
file = open(location)
|
||||
try:
|
||||
# TODO: this means we'll never make use of .pyc module,
|
||||
# which might be a problem, or not.
|
||||
module = imp.load_module(mname, file, os.path.basename(location),
|
||||
(".py", "r", imp.PY_SOURCE))
|
||||
del sys.modules[mname]
|
||||
self.loaded_tool_modules_[name] = module
|
||||
return module
|
||||
finally:
|
||||
file.close()
|
||||
|
||||
|
||||
|
||||
# FIXME:
|
||||
# Defines a Boost.Build extension project. Such extensions usually
|
||||
# contain library targets and features that can be used by many people.
|
||||
# Even though extensions are really projects, they can be initialize as
|
||||
# a module would be with the "using" (project.project-rules.using)
|
||||
# mechanism.
|
||||
#rule extension ( id : options * : * )
|
||||
#{
|
||||
# # The caller is a standalone module for the extension.
|
||||
# local mod = [ CALLER_MODULE ] ;
|
||||
#
|
||||
# # We need to do the rest within the extension module.
|
||||
# module $(mod)
|
||||
# {
|
||||
# import path ;
|
||||
#
|
||||
# # Find the root project.
|
||||
# local root-project = [ project.current ] ;
|
||||
# root-project = [ $(root-project).project-module ] ;
|
||||
# while
|
||||
# [ project.attribute $(root-project) parent-module ] &&
|
||||
# [ project.attribute $(root-project) parent-module ] != user-config
|
||||
# {
|
||||
# root-project = [ project.attribute $(root-project) parent-module ] ;
|
||||
# }
|
||||
#
|
||||
# # Create the project data, and bring in the project rules
|
||||
# # into the module.
|
||||
# project.initialize $(__name__) :
|
||||
# [ path.join [ project.attribute $(root-project) location ] ext $(1:L) ] ;
|
||||
#
|
||||
# # Create the project itself, i.e. the attributes.
|
||||
# # All extensions are created in the "/ext" project space.
|
||||
# project /ext/$(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
|
||||
# local attributes = [ project.attributes $(__name__) ] ;
|
||||
#
|
||||
# # Inherit from the root project of whomever is defining us.
|
||||
# project.inherit-attributes $(__name__) : $(root-project) ;
|
||||
# $(attributes).set parent-module : $(root-project) : exact ;
|
||||
# }
|
||||
#}
|
||||
|
||||
|
||||
class ProjectAttributes:
|
||||
"""Class keeping all the attributes of a project.
|
||||
|
||||
The standard attributes are 'id', "location", "project-root", "parent"
|
||||
"requirements", "default-build", "source-location" and "projects-to-build".
|
||||
"""
|
||||
|
||||
def __init__(self, manager, location, project_module):
|
||||
self.manager = manager
|
||||
self.location = location
|
||||
self.project_module = project_module
|
||||
self.attributes = {}
|
||||
self.usage_requirements = None
|
||||
|
||||
def set(self, attribute, specification, exact):
|
||||
"""Set the named attribute from the specification given by the user.
|
||||
The value actually set may be different."""
|
||||
|
||||
if exact:
|
||||
self.__dict__[attribute] = specification
|
||||
|
||||
elif attribute == "requirements":
|
||||
self.requirements = property_set.refine_from_user_input(
|
||||
self.requirements, specification,
|
||||
self.project_module, self.location)
|
||||
|
||||
elif attribute == "usage-requirements":
|
||||
unconditional = []
|
||||
for p in specification:
|
||||
split = property.split_conditional(p)
|
||||
if split:
|
||||
unconditional.append(split[1])
|
||||
else:
|
||||
unconditional.append(p)
|
||||
|
||||
non_free = property.remove("free", unconditional)
|
||||
if non_free:
|
||||
pass
|
||||
# FIXME:
|
||||
#errors.error "usage-requirements" $(specification) "have non-free properties" $(non-free) ;
|
||||
|
||||
t = property.translate_paths(specification, self.location)
|
||||
|
||||
existing = self.__dict__.get("usage-requirements")
|
||||
if existing:
|
||||
new = property_set.create(existing.raw() + t)
|
||||
else:
|
||||
new = property_set.create(t)
|
||||
self.__dict__["usage-requirements"] = new
|
||||
|
||||
|
||||
elif attribute == "default-build":
|
||||
self.__dict__["default-build"] = property_set.create(specification)
|
||||
|
||||
elif attribute == "source-location":
|
||||
source_location = []
|
||||
for path in specification:
|
||||
source_location += os.path.join(self.location, path)
|
||||
self.__dict__["source-location"] = source_location
|
||||
|
||||
elif attribute == "build-dir":
|
||||
self.__dict__["build-dir"] = os.path.join(self.location, specification)
|
||||
|
||||
elif not attribute in ["id", "default-build", "location",
|
||||
"source-location", "parent",
|
||||
"projects-to-build", "project-root"]:
|
||||
self.manager.errors()(
|
||||
"""Invalid project attribute '%s' specified
|
||||
for project at '%s'""" % (attribute, self.location))
|
||||
else:
|
||||
self.__dict__[attribute] = specification
|
||||
|
||||
def get(self, attribute):
|
||||
return self.__dict__[attribute]
|
||||
|
||||
def dump(self):
|
||||
"""Prints the project attributes."""
|
||||
id = self.get("id")
|
||||
if not id:
|
||||
id = "(none)"
|
||||
else:
|
||||
id = id[0]
|
||||
|
||||
parent = self.get("parent")
|
||||
if not parent:
|
||||
parent = "(none)"
|
||||
else:
|
||||
parent = parent[0]
|
||||
|
||||
print "'%s'" % id
|
||||
print "Parent project:%s", parent
|
||||
print "Requirements:%s", self.get("requirements")
|
||||
print "Default build:%s", string.join(self.get("debuild-build"))
|
||||
print "Source location:%s", string.join(self.get("source-location"))
|
||||
print "Projects to build:%s", string.join(self.get("projects-to-build").sort());
|
||||
|
||||
class ProjectRules:
    """Class keeping all rules that are made available to Jamfile."""

    def __init__(self, registry):
        # registry -- the owning ProjectRegistry instance.
        self.registry = registry
        self.manager_ = registry.manager
        # Rules registered dynamically via 'add_rule'.
        self.rules = {}
        # Methods of this class that are themselves Jamfile rules
        # (everything except the listed infrastructure methods).
        self.local_names = [x for x in self.__class__.__dict__
                            if x not in ["__init__", "init_project", "add_rule",
                                         "error_reporting_wrapper", "add_rule_for_type"]]
        # All rule names, built-in plus dynamically added.
        self.all_names_ = [x for x in self.local_names]
||||
def add_rule_for_type(self, type):
|
||||
rule_name = type.lower();
|
||||
|
||||
def xpto (name, sources, requirements = [], default_build = None, usage_requirements = []):
|
||||
return self.manager_.targets().create_typed_target(
|
||||
type, self.registry.current(), name[0], sources,
|
||||
requirements, default_build, usage_requirements)
|
||||
|
||||
self.add_rule(type.lower(), xpto)
|
||||
|
||||
    def add_rule(self, name, callable):
        """Registers 'callable' as a rule named 'name', visible to all
        subsequently initialized project modules."""
        self.rules[name] = callable
        self.all_names_.append(name)
||||
    def all_names(self):
        """Returns the names of all rules exposed to Jamfiles."""
        return self.all_names_
||||
    def call_and_report_errors(self, callable, *args):
        """Invokes 'callable' with 'args' inside a Jamfile error-reporting
        context. User-level errors are reported via the errors module
        rather than propagated; returns the callable's result, or None
        if an error was reported."""
        result = None
        try:
            self.manager_.errors().push_jamfile_context()
            result = callable(*args)
        except ExceptionWithUserContext, e:
            e.report()
        except Exception, e:
            # Unexpected exceptions are converted into user-context errors
            # and then reported the same way.
            try:
                self.manager_.errors().handle_stray_exception (e)
            except ExceptionWithUserContext, e:
                e.report()
        finally:
            self.manager_.errors().pop_jamfile_context()

        return result
|
||||
def make_wrapper(self, callable):
|
||||
"""Given a free-standing function 'callable', return a new
|
||||
callable that will call 'callable' and report all exceptins,
|
||||
using 'call_and_report_errors'."""
|
||||
def wrapper(*args):
|
||||
self.call_and_report_errors(callable, *args)
|
||||
return wrapper
|
||||
|
||||
    def init_project(self, project_module):
        """Imports every known rule into the given bjam project module,
        wrapping each one for error reporting."""

        for n in self.local_names:
            # Using 'getattr' here gives us a bound method,
            # while using self.__dict__[r] would give unbound one.
            v = getattr(self, n)
            if callable(v):
                # Map Python method names to Jam rule names: 'import_'
                # stands in for the keyword 'import', and underscores
                # become dashes.
                if n == "import_":
                    n = "import"
                else:
                    n = string.replace(n, "_", "-")

                bjam.import_rule(project_module, n,
                                 self.make_wrapper(v))

        # Also expose the dynamically registered rules.
        for n in self.rules:
            bjam.import_rule(project_module, n,
                             self.make_wrapper(self.rules[n]))
||||
def project(self, *args):
|
||||
|
||||
jamfile_module = self.registry.current().project_module()
|
||||
attributes = self.registry.attributes(jamfile_module)
|
||||
|
||||
id = None
|
||||
if args and args[0]:
|
||||
id = args[0][0]
|
||||
args = args[1:]
|
||||
|
||||
if id:
|
||||
if id[0] != '/':
|
||||
id = '/' + id
|
||||
self.registry.register_id (id, jamfile_module)
|
||||
|
||||
explicit_build_dir = None
|
||||
for a in args:
|
||||
if a:
|
||||
attributes.set(a[0], a[1:], exact=0)
|
||||
if a[0] == "build-dir":
|
||||
explicit_build_dir = a[1]
|
||||
|
||||
# If '--build-dir' is specified, change the build dir for the project.
|
||||
if self.registry.global_build_dir:
|
||||
|
||||
location = attributes.get("location")
|
||||
# Project with empty location is 'standalone' project, like
|
||||
# user-config, or qt. It has no build dir.
|
||||
# If we try to set build dir for user-config, we'll then
|
||||
# try to inherit it, with either weird, or wrong consequences.
|
||||
if location and location == attributes.get("project-root"):
|
||||
# This is Jamroot.
|
||||
if id:
|
||||
if explicit_build_dir and os.path.isabs(explicit_build_dir):
|
||||
self.register.manager.errors()(
|
||||
"""Absolute directory specified via 'build-dir' project attribute
|
||||
Don't know how to combine that with the --build-dir option.""")
|
||||
|
||||
rid = id
|
||||
if rid[0] == '/':
|
||||
rid = rid[1:]
|
||||
|
||||
p = os.path.join(self.registry.global_build_dir,
|
||||
rid, explicit_build_dir)
|
||||
attributes.set("build-dir", p, exact=1)
|
||||
elif explicit_build_dir:
|
||||
self.registry.manager.errors()(
|
||||
"""When --build-dir is specified, the 'build-project'
|
||||
attribute is allowed only for top-level 'project' invocations""")
|
||||
|
||||
def constant(self, name, value):
|
||||
"""Declare and set a project global constant.
|
||||
Project global constants are normal variables but should
|
||||
not be changed. They are applied to every child Jamfile."""
|
||||
m = "Jamfile</home/ghost/Work/Boost/boost-svn/tools/build/v2_python/python/tests/bjam/make>"
|
||||
self.registry.current().add_constant(name[0], value)
|
||||
|
||||
def path_constant(self, name, value):
|
||||
"""Declare and set a project global constant, whose value is a path. The
|
||||
path is adjusted to be relative to the invocation directory. The given
|
||||
value path is taken to be either absolute, or relative to this project
|
||||
root."""
|
||||
self.registry.current().add_constant(name[0], value, path=1)
|
||||
|
||||
def use_project(self, id, where):
|
||||
# See comment in 'load' for explanation why we record the
|
||||
# parameters as opposed to loading the project now.
|
||||
m = self.registry.current().project_module();
|
||||
self.registry.used_projects[m].append((id, where))
|
||||
|
||||
def build_project(self, dir):
|
||||
assert(isinstance(dir, list))
|
||||
jamfile_module = self.registry.current().project_module()
|
||||
attributes = self.registry.attributes(jamfile_module)
|
||||
now = attributes.get("projects-to-build")
|
||||
attributes.set("projects-to-build", now + dir, exact=True)
|
||||
|
||||
def explicit(self, target_names):
|
||||
t = self.registry.current()
|
||||
for n in target_names:
|
||||
t.mark_target_as_explicit(n)
|
||||
|
||||
def glob(self, wildcards, excludes=None):
|
||||
return self.registry.glob_internal(self.registry.current(),
|
||||
wildcards, excludes, "glob")
|
||||
|
||||
def glob_tree(self, wildcards, excludes=None):
|
||||
bad = 0
|
||||
for p in wildcards:
|
||||
if os.path.dirname(p):
|
||||
bad = 1
|
||||
|
||||
if excludes:
|
||||
for p in excludes:
|
||||
if os.path.dirname(p):
|
||||
bad = 1
|
||||
|
||||
if bad:
|
||||
self.registry.manager().errors()(
|
||||
"The patterns to 'glob-tree' may not include directory")
|
||||
return self.registry.glob_internal(self.registry.current(),
|
||||
wildcards, excludes, "glob_tree")
|
||||
|
||||
|
||||
def using(self, toolset, *args):
|
||||
# The module referred by 'using' can be placed in
|
||||
# the same directory as Jamfile, and the user
|
||||
# will expect the module to be found even though
|
||||
# the directory is not in BOOST_BUILD_PATH.
|
||||
# So temporary change the search path.
|
||||
jamfile_module = self.registry.current().project_module()
|
||||
attributes = self.registry.attributes(jamfile_module)
|
||||
location = attributes.get("location")
|
||||
|
||||
m = self.registry.load_module(toolset[0], [location])
|
||||
if not m.__dict__.has_key("init"):
|
||||
self.registry.manager.errors()(
|
||||
"Tool module '%s' does not define the 'init' method" % toolset[0])
|
||||
m.init(*args)
|
||||
|
||||
|
||||
    def import_(self, name, names_to_import=None, local_names=None):
        """Import a module into the current Jamfile module.

        Every callable attribute of the loaded module is made available
        to the Jamfile as 'name.rule'. If 'names_to_import' is given,
        those rules are additionally imported under 'local_names' (or
        under their own names when 'local_names' is not given).
        """
        # 'name' arrives as a one-element list (bjam argument convention).
        name = name[0]
        jamfile_module = self.registry.current().project_module()
        attributes = self.registry.attributes(jamfile_module)
        location = attributes.get("location")

        # Search the project location too, so a module placed next to
        # the Jamfile is found.
        m = self.registry.load_module(name, [location])

        # Expose every callable as a qualified rule 'name.f'.
        for f in m.__dict__:
            v = m.__dict__[f]
            if callable(v):
                bjam.import_rule(jamfile_module, name + "." + f, v)

        if names_to_import:
            if not local_names:
                local_names = names_to_import

            if len(names_to_import) != len(local_names):
                self.registry.manager.errors()(
                    """The number of names to import and local names do not match.""")

            # Expose the selected rules under their (possibly renamed)
            # unqualified names as well.
            for n, l in zip(names_to_import, local_names):
                bjam.import_rule(jamfile_module, l, m.__dict__[n])
|
||||
|
||||
def conditional(self, condition, requirements):
|
||||
"""Calculates conditional requirements for multiple requirements
|
||||
at once. This is a shorthand to be reduce duplication and to
|
||||
keep an inline declarative syntax. For example:
|
||||
|
||||
lib x : x.cpp : [ conditional <toolset>gcc <variant>debug :
|
||||
<define>DEBUG_EXCEPTION <define>DEBUG_TRACE ] ;
|
||||
"""
|
||||
|
||||
c = string.join(condition, ",")
|
||||
return [c + ":" + r for r in requirements]
|
||||
636
src/build/property.py
Normal file
636
src/build/property.py
Normal file
@@ -0,0 +1,636 @@
|
||||
# Status: ported, except for tests and --abbreviate-paths.
|
||||
# Base revision: 40480
|
||||
#
|
||||
# Copyright 2001, 2002, 2003 Dave Abrahams
|
||||
# Copyright 2006 Rene Rivera
|
||||
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
import re
|
||||
from b2.util.utility import *
|
||||
from b2.build import feature
|
||||
from b2.util import sequence, set
|
||||
|
||||
__re_two_ampersands = re.compile ('&&')
|
||||
__re_comma = re.compile (',')
|
||||
__re_split_condition = re.compile ('(.*):(<.*)')
|
||||
__re_toolset_feature = re.compile ('^(<toolset>|<toolset->)')
|
||||
__re_os_feature = re.compile ('^(<os>)')
|
||||
__re_split_conditional = re.compile (r'(.+):<(.+)')
|
||||
__re_colon = re.compile (':')
|
||||
__re_has_condition = re.compile (r':<')
|
||||
__re_separate_condition_and_property = re.compile (r'(.*):(<.*)')
|
||||
|
||||
def reset():
    """Reset all module-level state (mainly for testing purposes)."""
    global __results

    # Maps a '-'-joined property list to its computed path; used by
    # as_path() as a cache.
    __results = {}

reset()
|
||||
|
||||
|
||||
def path_order (x, y):
    """ Helper for as_path, below. Orders properties with the implicit ones
        first, and within the two sections in alphabetical order of feature
        name.

        This is a cmp-style comparator returning -1, 0, or 1.
    """
    if x == y:
        return 0

    xg = get_grist (x)
    yg = get_grist (y)

    # An ungristed (implicit) property sorts before a gristed one.
    if yg and not xg:
        return -1

    elif xg and not yg:
        return 1

    else:
        # Here either both are gristed or both are implicit. Implicit
        # values are compared in their expanded-subfeature form.
        if not xg:
            x = feature.expand_subfeatures([x])
            y = feature.expand_subfeatures([y])

        if x < y:
            return -1
        elif x > y:
            return 1
        else:
            return 0
|
||||
|
||||
def abbreviate_dashed(string):
    """Abbreviate each dash-separated component of 'string' and re-join
    them with dashes.

    FIXME: 'string.abbreviate' does not exist -- this refers to a
    not-yet-ported abbreviation helper (see --abbreviate-paths).
    """
    # Fixed: str.join must be called on the separator, not on the list
    # (lists have no join method).
    return '-'.join([string.abbreviate(part) for part in string.split('-')])
|
||||
|
||||
def identify(string):
    """Identity transform, used in place of abbreviation when path
    abbreviation is not enabled."""
    return string
|
||||
|
||||
# FIXME: --abbreviate-paths
|
||||
|
||||
def as_path (properties):
    """ Returns a path which represents the given expanded property set.

        Results are cached in the module-level __results map, keyed by
        the '-'-joined property list.
    """
    key = '-'.join (properties)

    if not __results.has_key (key):
        # trim redundancy
        properties = feature.minimize (properties)

        # sort according to path_order
        properties.sort (path_order)

        components = []
        for p in properties:
            pg = get_grist (p)
            # FIXME: abbrev?
            if pg:
                # Explicit feature: rendered as 'feature-value'.
                f = ungrist (pg)
                components.append (f + '-' + replace_grist (p, ''))

            else:
                # Implicit value: rendered as-is.
                components.append (p)

        __results [key] = '/'.join (components)

    return __results [key]
|
||||
|
||||
def refine (properties, requirements):
    """ Refines 'properties' by overriding any non-free properties
        for which a different value is specified in 'requirements'.
        Conditional requirements are just added without modification.
        Returns the resulting list of properties.
    """
    # The result has no duplicates, so we store it in a map
    # TODO: use a set from Python 2.4?
    result = {}

    # Records all requirements.
    required = {}

    # All the elements of requirements should be present in the result
    # Record them so that we can handle 'properties'.
    for r in requirements:
        # Don't consider conditional requirements.
        if not is_conditional (r):
            # Note: cannot use local here, so take an ugly name
            # Maps feature grist -> required value for that feature.
            required [get_grist (r)] = replace_grist (r, '')

    for p in properties:
        # Skip conditional properties
        if is_conditional (p):
            result [p] = None
        # No processing for free properties
        elif 'free' in feature.attributes (get_grist (p)):
            result [p] = None
        else:
            if required.has_key (get_grist (p)):
                # A requirement exists for this feature: keep the
                # property only when its value matches, otherwise
                # substitute the required value.
                required_value = required [get_grist (p)]

                value = replace_grist (p, '')

                if value != required_value:
                    result [replace_grist (required_value, get_grist (p))] = None
                else:
                    result [p] = None
            else:
                result [p] = None

    # NOTE(review): dict key order is arbitrary, so the order of the
    # refined properties is unspecified -- confirm callers do not rely
    # on ordering.
    return result.keys () + requirements
|
||||
|
||||
def translate_paths (properties, path):
    """ Interpret all path properties in 'properties' as relative to 'path'
        The property values are assumed to be in system-specific form, and
        will be translated into normalized form.
    """
    result = []

    for p in properties:
        split = split_conditional (p)

        condition = ''

        if split:
            condition = split [0]
            p = split [1]
            # NOTE(review): 'condition' does not include the trailing
            # ':' here -- verify conditional path properties round-trip
            # correctly through this function.

        if get_grist (p) and 'path' in feature.attributes (get_grist (p)):
            # Fixed: split the property *value* (the part after the
            # grist), not the grist itself -- the previous code joined
            # 'path' with the feature name instead of the feature value.
            values = __re_two_ampersands.split (forward_slashes (replace_grist (p, '')))

            # A path value may be a '&&'-separated list of paths; make
            # each one relative to 'path'.
            t = [os.path.join(path, v) for v in values]
            t = '&&'.join (t)
            tp = backslashes_to_slashes (replace_grist (t, get_grist (p)))
            result.append (condition + tp)

        else:
            result.append (condition + p)

    return result
|
||||
|
||||
def translate_indirect(specification, context_module):
    """Assumes that all feature values that start with '@' are names of
    rules, used in 'context_module'. Such rules can be either local to
    the module or global. Qualifies local rules with the name of the
    module."""
    result = []
    for p in specification:
        if p[0] != '@':
            result.append(p)
            continue

        rule_name = p[1:]
        if '.' not in p:
            # This is an unqualified rule name. The user might want to
            # set flags on this rule name, and toolset.flag
            # auto-qualifies the rule name, so do the same here so flag
            # setting works. We could arrange for toolset.flag to *not*
            # auto-qualify the argument, but then two rules defined in
            # two Jamfiles would conflict.
            rule_name = context_module + "." + rule_name

        result.append(get_grist(p) + "@" + rule_name)

    return result
|
||||
|
||||
def validate (properties):
    """ Exit with error if any of the properties is not valid.
        properties may be a single property or a sequence of properties.
    """
    # Normalize a lone property string to a one-element list.
    if isinstance (properties, str):
        properties = [properties]
    for p in properties:
        __validate1 (p)
|
||||
|
||||
def expand_subfeatures_in_conditions (properties):
    """Expand subfeature values inside the condition part of each
    conditional property in 'properties'; non-conditional properties
    pass through unchanged.

    NOTE(review): a second definition of this function appears later in
    this module and supersedes this one.
    """
    result = []
    for p in properties:
        s = __re_split_condition.match (p)

        if not s:
            result.append (p)

        else:
            condition = s.group (1)

            # Condition might include several elements
            condition = __re_comma.split (condition)

            value = s.group (2)

            e = []
            for c in condition:

                cg = get_grist (c)
                if __re_toolset_feature.match (cg) or __re_os_feature.match (cg):
                    # It common that condition includes a toolset which
                    # was never defined, or mentiones subfeatures which
                    # were never defined. In that case, validation will
                    # only produce an spirious error, so don't validate.
                    # Fixed: expand_subfeatures returns a list, so extend
                    # rather than append (append built a list of lists,
                    # matching the corrected duplicate definition below).
                    e.extend (feature.expand_subfeatures (c, True))

                else:
                    e.extend (feature.expand_subfeatures (c))

            if e == condition:
                result.append (p)

            else:
                # Fixed: this module imports lowercase 'set' from
                # b2.util; 'Set' was an undefined name.
                individual_subfeatures = set.difference (e, condition)
                result.append (','.join (individual_subfeatures) + ':' + value)

    return result
|
||||
|
||||
def make (specification):
    """ Converts implicit values into full properties.
    """
    result = []
    for e in specification:
        if get_grist (e):
            # Already a full (gristed) property.
            result.append (e)
        elif feature.is_implicit_value (e):
            # Prepend the feature implied by this value.
            result.append (feature.implied_feature (e) + e)
        else:
            raise InvalidProperty ("'%s' is not a valid for property specification" % e)
    return result
|
||||
|
||||
|
||||
def split_conditional (property):
    """ If 'property' is conditional property, returns
        condition and the property, e.g
        <variant>debug,<toolset>gcc:<inlining>full will become
        <variant>debug,<toolset>gcc <inlining>full.
        Otherwise, returns None.
    """
    m = __re_split_conditional.match (property)
    if not m:
        return None
    # The regex consumed the leading '<' of the property; restore it.
    return (m.group (1), '<' + m.group (2))
|
||||
|
||||
def is_conditional (property):
    """ Returns True if a property is conditional.
    """
    # A ':' in the value part (grist stripped) marks a condition.
    return bool (__re_colon.search (replace_grist (property, '')))
|
||||
|
||||
def select (features, properties):
    """ Selects properties which correspond to any of the given features.
    """
    # Add any missing angle brackets.
    features = add_grist (features)

    return [p for p in properties if get_grist(p) in features]
|
||||
|
||||
def validate_property_sets (sets):
    """Validate every property set in 'sets', exiting on error."""
    for s in sets:
        validate (feature.split (s))
|
||||
|
||||
|
||||
def evaluate_conditionals_in_context (properties, context):
    """ Removes all conditional properties whose conditions are not met.
        For those with met conditions, removes the condition. Properties
        in conditions are looked up in 'context'.
    """
    base = []
    conditionals = []

    # Partition properties into plain and conditional ones.
    for p in properties:
        if __re_has_condition.search (p):
            conditionals.append (p)
        else:
            base.append (p)

    result = base
    for p in conditionals:

        # Separate condition and property
        s = __re_separate_condition_and_property.match (p)

        # Split condition into individual properties
        conditions = s.group (1).split (',')

        # Evaluate condition
        # Fixed: 'c' was an undefined name here; the 'conditions' list
        # computed above is what must be checked against the context.
        if set.contains (conditions, context):
            result.append (s.group (2))

    return result
|
||||
|
||||
def expand_subfeatures_in_conditions(properties):
    """Expand subfeature values inside the condition part of each
    conditional property in 'properties'; non-conditional properties
    pass through unchanged.

    NOTE: this redefinition supersedes the earlier function of the same
    name in this module.
    """
    result = []
    for p in properties:

        s = __re_separate_condition_and_property.match(p)
        if not s:
            result.append(p)
        else:
            condition = s.group(1)
            # Condition might include several elements
            condition = condition.split(",")
            value = s.group(2)

            # Expanded forms of every condition element.
            e = []
            for c in condition:
                # It common that condition includes a toolset which
                # was never defined, or mentiones subfeatures which
                # were never defined. In that case, validation will
                # only produce an spirious error, so prevent
                # validation by passing 'true' as second parameter.
                e.extend(feature.expand_subfeatures(c, dont_validate=True))

            if e == condition:
                # Expansion changed nothing; keep the property as-is.
                result.append(p)
            else:
                # Keep only the subfeatures the expansion introduced.
                individual_subfeatures = set.difference(e, condition)
                result.append(",".join(individual_subfeatures) + ":" + value)

    return result
|
||||
|
||||
def change (properties, feature, value = None):
    """ Returns a modified version of properties with all values of the
        given feature replaced by the given value.
        If 'value' is None the feature will be removed.
    """
    result = []

    # Ensure the feature is in gristed (<feature>) form.
    feature = add_grist (feature)

    for p in properties:
        if get_grist (p) == feature:
            # Matching property: replaced when a value is given;
            # silently dropped otherwise.
            if value:
                result.append (replace_grist (value, feature))

        else:
            # Non-matching properties pass through unchanged.
            result.append (p)

    return result
|
||||
|
||||
|
||||
################################################################
|
||||
# Private functions
|
||||
|
||||
def __validate1 (property):
    """ Exit with error if property is not valid.
    """
    msg = None

    f = get_grist (property)
    if f:
        # Gristed property: check the feature and its value.
        value = get_value (property)

        if not feature.valid (f):
            f = ungrist (get_grist (property)) # Ungrist for better error messages
            msg = "Unknown feature '%s'" % f

        elif value and not 'free' in feature.attributes (f):
            # Non-free features have a fixed set of valid values.
            feature.validate_value_string (f, value)

        elif not value:
            f = ungrist (get_grist (property)) # Ungrist for better error messages
            msg = "No value specified for feature '%s'" % f

    else:
        # Ungristed: must be a valid value of some implicit feature.
        f = feature.implied_feature (property)
        feature.validate_value_string (f, property)

    if msg:
        # FIXME: don't use globals like this. Import here to
        # break circular dependency.
        from b2.manager import get_manager
        get_manager().errors()("Invalid property '%s': %s" % (property, msg))
|
||||
|
||||
|
||||
###################################################################
|
||||
# Still to port.
|
||||
# Original lines are prefixed with "# "
|
||||
#
|
||||
#
|
||||
# import utility : ungrist ;
|
||||
# import sequence : unique ;
|
||||
# import errors : error ;
|
||||
# import feature ;
|
||||
# import regex ;
|
||||
# import sequence ;
|
||||
# import set ;
|
||||
# import path ;
|
||||
# import assert ;
|
||||
#
|
||||
#
|
||||
|
||||
|
||||
# rule validate-property-sets ( property-sets * )
|
||||
# {
|
||||
# for local s in $(property-sets)
|
||||
# {
|
||||
# validate [ feature.split $(s) ] ;
|
||||
# }
|
||||
# }
|
||||
#
|
||||
|
||||
def remove(attributes, properties):
    """Returns a property set which includes all the elements
    in 'properties' that do not have attributes listed in 'attributes'."""

    result = []
    for e in properties:
        attributes_new = feature.attributes(get_grist(e))
        has_common_features = 0
        for a in attributes_new:
            if a in attributes:
                has_common_features = 1
                break

        if not has_common_features:
            # Fixed: 'result += e' extended the list with the individual
            # characters of the property string; append the whole string.
            result.append(e)

    return result
|
||||
|
||||
|
||||
def take(attributes, properties):
    """Returns a property set which include all
    properties in 'properties' that have any of 'attributes'."""
    return [e for e in properties
            if set.intersection(attributes, feature.attributes(get_grist(e)))]
|
||||
|
||||
|
||||
class PropertyMap:
    """ Class which maintains a property set -> string mapping.
    """
    def __init__ (self):
        # Parallel lists: __properties[i] is associated with __values[i].
        self.__properties = []
        self.__values = []

    def insert (self, properties, value):
        """ Associate value with properties.
        """
        self.__properties.append(properties)
        self.__values.append(value)

    def find (self, properties):
        """ Return the value associated with properties
            or any subset of it. If more than one
            subset has value assigned to it, return the
            value for the longest subset, if it's unique.
        """
        return self.find_replace (properties)

    def find_replace(self, properties, value=None):
        """Find the best match as in 'find'; additionally, when 'value'
        is given, store it for that match. Returns the previous value."""
        matches = []
        match_ranks = []

        # Collect every stored property set contained in 'properties',
        # ranked by size.
        for index, stored in enumerate(self.__properties):
            if set.contains (stored, properties):
                matches.append (index)
                match_ranks.append (len (stored))

        best = sequence.select_highest_ranked (matches, match_ranks)

        if not best:
            return None

        if len (best) > 1:
            raise NoBestMatchingAlternative ()

        best = best [0]
        original = self.__values [best]

        if value:
            self.__values [best] = value

        return original
|
||||
|
||||
# local rule __test__ ( )
|
||||
# {
|
||||
# import errors : try catch ;
|
||||
# import feature ;
|
||||
# import feature : feature subfeature compose ;
|
||||
#
|
||||
# # local rules must be explicitly re-imported
|
||||
# import property : path-order ;
|
||||
#
|
||||
# feature.prepare-test property-test-temp ;
|
||||
#
|
||||
# feature toolset : gcc : implicit symmetric ;
|
||||
# subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
|
||||
# 3.0 3.0.1 3.0.2 : optional ;
|
||||
# feature define : : free ;
|
||||
# feature runtime-link : dynamic static : symmetric link-incompatible ;
|
||||
# feature optimization : on off ;
|
||||
# feature variant : debug release : implicit composite symmetric ;
|
||||
# feature rtti : on off : link-incompatible ;
|
||||
#
|
||||
# compose <variant>debug : <define>_DEBUG <optimization>off ;
|
||||
# compose <variant>release : <define>NDEBUG <optimization>on ;
|
||||
#
|
||||
# import assert ;
|
||||
# import "class" : new ;
|
||||
#
|
||||
# validate <toolset>gcc <toolset>gcc-3.0.1 : $(test-space) ;
|
||||
#
|
||||
# assert.result <toolset>gcc <rtti>off <define>FOO
|
||||
# : refine <toolset>gcc <rtti>off
|
||||
# : <define>FOO
|
||||
# : $(test-space)
|
||||
# ;
|
||||
#
|
||||
# assert.result <toolset>gcc <optimization>on
|
||||
# : refine <toolset>gcc <optimization>off
|
||||
# : <optimization>on
|
||||
# : $(test-space)
|
||||
# ;
|
||||
#
|
||||
# assert.result <toolset>gcc <rtti>off
|
||||
# : refine <toolset>gcc : <rtti>off : $(test-space)
|
||||
# ;
|
||||
#
|
||||
# assert.result <toolset>gcc <rtti>off <rtti>off:<define>FOO
|
||||
# : refine <toolset>gcc : <rtti>off <rtti>off:<define>FOO
|
||||
# : $(test-space)
|
||||
# ;
|
||||
#
|
||||
# assert.result <toolset>gcc:<define>foo <toolset>gcc:<define>bar
|
||||
# : refine <toolset>gcc:<define>foo : <toolset>gcc:<define>bar
|
||||
# : $(test-space)
|
||||
# ;
|
||||
#
|
||||
# assert.result <define>MY_RELEASE
|
||||
# : evaluate-conditionals-in-context
|
||||
# <variant>release,<rtti>off:<define>MY_RELEASE
|
||||
# : <toolset>gcc <variant>release <rtti>off
|
||||
#
|
||||
# ;
|
||||
#
|
||||
# try ;
|
||||
# validate <feature>value : $(test-space) ;
|
||||
# catch "Invalid property '<feature>value': unknown feature 'feature'." ;
|
||||
#
|
||||
# try ;
|
||||
# validate <rtti>default : $(test-space) ;
|
||||
# catch \"default\" is not a known value of feature <rtti> ;
|
||||
#
|
||||
# validate <define>WHATEVER : $(test-space) ;
|
||||
#
|
||||
# try ;
|
||||
# validate <rtti> : $(test-space) ;
|
||||
# catch "Invalid property '<rtti>': No value specified for feature 'rtti'." ;
|
||||
#
|
||||
# try ;
|
||||
# validate value : $(test-space) ;
|
||||
# catch "value" is not a value of an implicit feature ;
|
||||
#
|
||||
#
|
||||
# assert.result <rtti>on
|
||||
# : remove free implicit : <toolset>gcc <define>foo <rtti>on : $(test-space) ;
|
||||
#
|
||||
# assert.result <include>a
|
||||
# : select include : <include>a <toolset>gcc ;
|
||||
#
|
||||
# assert.result <include>a
|
||||
# : select include bar : <include>a <toolset>gcc ;
|
||||
#
|
||||
# assert.result <include>a <toolset>gcc
|
||||
# : select include <bar> <toolset> : <include>a <toolset>gcc ;
|
||||
#
|
||||
# assert.result <toolset>kylix <include>a
|
||||
# : change <toolset>gcc <include>a : <toolset> kylix ;
|
||||
#
|
||||
# # Test ordinary properties
|
||||
# assert.result
|
||||
# : split-conditional <toolset>gcc
|
||||
# ;
|
||||
#
|
||||
# # Test properties with ":"
|
||||
# assert.result
|
||||
# : split-conditional <define>FOO=A::B
|
||||
# ;
|
||||
#
|
||||
# # Test conditional feature
|
||||
# assert.result <toolset>gcc,<toolset-gcc:version>3.0 <define>FOO
|
||||
# : split-conditional <toolset>gcc,<toolset-gcc:version>3.0:<define>FOO
|
||||
# ;
|
||||
#
|
||||
# feature.finish-test property-test-temp ;
|
||||
# }
|
||||
#
|
||||
|
||||
368
src/build/property_set.py
Normal file
368
src/build/property_set.py
Normal file
@@ -0,0 +1,368 @@
|
||||
# Status: ported.
|
||||
# Base revision: 40480
|
||||
|
||||
# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
from b2.util.utility import *
|
||||
import property, feature, string
|
||||
from b2.exceptions import *
|
||||
from b2.util.sequence import unique
|
||||
from b2.util.set import difference
|
||||
|
||||
def reset():
    """Reset the module-level property-set cache (for testing)."""
    global __cache

    # Maps a key string to its PropertySet instance.
    # TODO: use a map of weak refs?
    __cache = {}

reset()
|
||||
|
||||
|
||||
def create (raw_properties = []):
    """ Creates a new 'PropertySet' instance for the given raw properties,
        or returns an already existing one.
    """
    # Fixed: work on a sorted copy. The previous code sorted the
    # caller's list in place and -- because [] is a shared default
    # argument -- could even mutate the default between calls.
    raw_properties = unique (sorted (raw_properties))

    key = '-'.join (raw_properties)

    if not __cache.has_key (key):
        __cache [key] = PropertySet (raw_properties)

    return __cache [key]
|
||||
|
||||
def create_with_validation (raw_properties):
    """ Creates new 'PropertySet' instances after checking
        that all properties are valid and converting incidental
        properties into gristed form.
    """
    property.validate (raw_properties)
    return create (property.make (raw_properties))
|
||||
|
||||
def empty ():
    """ Returns PropertySet with empty set of properties.
    """
    return create ()
|
||||
|
||||
def create_from_user_input(raw_properties, jamfile_module, location):
    """Creates a property-set from the input given by the user, in the
    context of 'jamfile_module' at 'location'."""
    property.validate(raw_properties)

    # Successively normalize: paths relative to 'location', indirect
    # (@rule) values qualified by module, subfeatures expanded in
    # conditions, and implicit values turned into full properties.
    spec = property.translate_paths(raw_properties, location)
    spec = property.translate_indirect(spec, jamfile_module)
    spec = property.expand_subfeatures_in_conditions(spec)
    spec = property.make(spec)
    return create(spec)
|
||||
|
||||
|
||||
def refine_from_user_input(parent_requirements, specification, jamfile_module,
                           location):
    """Refines requirements with requirements provided by the user.
    Specially handles "-<property>value" syntax in specification
    to remove given requirements.
    - parent_requirements -- property-set object with requirements
      to refine
    - specification -- string list of requirements provided by the user
    - jamfile_module -- the module to which context indirect features
      will be bound.
    - location -- the path to which path features are relative."""

    if not specification:
        return parent_requirements

    # Partition the specification: entries prefixed with '-' request
    # removal of an inherited requirement.
    add_requirements = []
    remove_requirements = []

    for r in specification:
        if r[0] == '-':
            remove_requirements.append(r[1:])
        else:
            add_requirements.append(r)

    if remove_requirements:
        # Need to create property set, so that path features
        # and indirect features are translated just like they
        # are in project requirements.
        ps = create_from_user_input(remove_requirements,
                                    jamfile_module, location)

        parent_requirements = create(difference(parent_requirements.raw(),
                                                ps.raw()))
        specification = add_requirements

    requirements = create_from_user_input(specification,
                                          jamfile_module, location)

    return parent_requirements.refine(requirements)
|
||||
|
||||
class PropertySet:
|
||||
""" Class for storing a set of properties.
|
||||
- there's 1<->1 correspondence between identity and value. No
|
||||
two instances of the class are equal. To maintain this property,
|
||||
the 'PropertySet.create' rule should be used to create new instances.
|
||||
Instances are immutable.
|
||||
|
||||
- each property is classified with regard to it's effect on build
|
||||
results. Incidental properties have no effect on build results, from
|
||||
Boost.Build point of view. Others are either free, or non-free, which we
|
||||
call 'base'. Each property belong to exactly one of those categories and
|
||||
it's possible to get list of properties in each category.
|
||||
|
||||
In addition, it's possible to get list of properties with specific
|
||||
attribute.
|
||||
|
||||
- several operations, like and refine and as_path are provided. They all use
|
||||
caching whenever possible.
|
||||
"""
|
||||
    def __init__ (self, raw_properties = []):
        """Classify 'raw_properties' into the categories exposed by the
        accessor methods. Instances should be created via the module's
        'create' function, which guarantees uniqueness per value.

        NOTE(review): the default argument [] is mutable; it appears
        never to be mutated here, but confirm no caller relies on it.
        """
        # The full, unprocessed property list.
        self.raw_ = raw_properties

        # Classification buckets, filled below. A property lands in
        # exactly one of incidental_/free_/base_, one of
        # dependency_/non_dependency_, and one of
        # conditional_/non_conditional_.
        self.incidental_ = []
        self.free_ = []
        self.base_ = []
        self.dependency_ = []
        self.non_dependency_ = []
        self.conditional_ = []
        self.non_conditional_ = []
        self.propagated_ = []
        self.link_incompatible = []

        # A cache of refined properties.
        self.refined_ = {}

        # A cache of property sets created by adding properties to this one.
        self.added_ = {}

        # Cache for the default properties.
        self.defaults_ = None

        # Cache for the expanded properties.
        self.expanded_ = None

        # Cache for the expanded composite properties
        self.composites_ = None

        # Cache for the property set containing propagated properties.
        self.propagated_ps_ = None

        # A map of features to its values.
        self.feature_map_ = None

        # A tuple (target path, is relative to build directory)
        self.target_path_ = None

        self.as_path_ = None

        # A cache for already evaluated sets.
        self.evaluated_ = {}

        for p in raw_properties:
            if not get_grist (p):
                raise BaseException ("Invalid property: '%s'" % p)

            att = feature.attributes (get_grist (p))

            # A feature can be both incidental and free,
            # in which case we add it to incidental.
            if 'incidental' in att:
                self.incidental_.append (p)
            elif 'free' in att:
                self.free_.append (p)
            else:
                self.base_.append (p)

            if 'dependency' in att:
                self.dependency_.append (p)
            else:
                self.non_dependency_.append (p)

            if property.is_conditional (p):
                self.conditional_.append (p)
            else:
                self.non_conditional_.append (p)

            if 'propagated' in att:
                self.propagated_.append (p)

            if 'link_incompatible' in att:
                self.link_incompatible.append (p)
|
||||
|
||||
def raw (self):
|
||||
""" Returns the list of stored properties.
|
||||
"""
|
||||
return self.raw_
|
||||
|
||||
    def __str__(self):
        # NOTE(review): relies on the Python 2 'string' module;
        # string.join does not exist on Python 3.
        return string.join(self.raw_)
|
||||
|
||||
def base (self):
|
||||
""" Returns properties that are neither incidental nor free.
|
||||
"""
|
||||
return self.base_
|
||||
|
||||
def free (self):
|
||||
""" Returns free properties which are not dependency properties.
|
||||
"""
|
||||
return self.free_
|
||||
|
||||
def dependency (self):
|
||||
""" Returns dependency properties.
|
||||
"""
|
||||
return self.dependency_
|
||||
|
||||
def non_dependency (self):
|
||||
""" Returns properties that are not dependencies.
|
||||
"""
|
||||
return self.non_dependency_
|
||||
|
||||
def conditional (self):
|
||||
""" Returns conditional properties.
|
||||
"""
|
||||
return self.conditional_
|
||||
|
||||
def non_conditional (self):
|
||||
""" Returns properties that are not conditional.
|
||||
"""
|
||||
return self.non_conditional_
|
||||
|
||||
def incidental (self):
|
||||
""" Returns incidental properties.
|
||||
"""
|
||||
return self.incidental_
|
||||
|
||||
def refine (self, requirements):
|
||||
""" Refines this set's properties using the requirements passed as an argument.
|
||||
"""
|
||||
str_req = str (requirements)
|
||||
if not self.refined_.has_key (str_req):
|
||||
r = property.refine (self.raw (), requirements.raw ())
|
||||
|
||||
self.refined_ [str_req] = create (r)
|
||||
|
||||
return self.refined_ [str_req]
|
||||
|
||||
def expand (self):
|
||||
if not self.expanded_:
|
||||
expanded = feature.expand (self.raw_)
|
||||
self.expanded_ = create (expanded)
|
||||
return self.expanded_
|
||||
|
||||
def expand_componsite(self):
|
||||
if not self.componsites_:
|
||||
self.composites_ = create(feature.expand_composires(self.raw_))
|
||||
return self.composites_
|
||||
|
||||
def evaluate_conditionals(self, context=None):
|
||||
if not context:
|
||||
context = self
|
||||
|
||||
if not self.evaluated_.has_key(context):
|
||||
self.evaluated_[context] = create(
|
||||
property.evaluate_conditionals_in_context(self.raw_,
|
||||
context.raw()))
|
||||
|
||||
return self.evaluated_[context]
|
||||
|
||||
def propagated (self):
|
||||
if not self.propagated_ps_:
|
||||
self.propagated_ps_ = create (self.propagated_)
|
||||
return self.propagated_ps_
|
||||
|
||||
def add_defaults (self):
|
||||
if not self.defaults_:
|
||||
expanded = feature.add_defaults(self.raw_)
|
||||
self.defaults_ = create(expanded)
|
||||
return self.defaults_
|
||||
|
||||
def as_path (self):
|
||||
if not self.as_path_:
|
||||
self.as_path_ = property.as_path(self.base_)
|
||||
|
||||
return self.as_path_
|
||||
|
||||
    def target_path (self):
        """ Computes the target path that should be used for targets with
            these properties.

            Returns a (cached) tuple of:
              - the computed path
              - True if the path is relative to the build directory,
                False otherwise.
        """
        if not self.target_path_:
            # The <location> feature can be used to explicitly
            # change the location of generated targets.
            l = self.get ('<location>')
            if l:
                # NOTE(review): self.get() returns a list, so 'computed' is a
                # list on this branch but a string on the other -- confirm
                # that callers handle both shapes.
                computed = l
                is_relative = False

            else:
                p = self.as_path ()

                # Really, an ugly hack. Boost regression test system requires
                # specific target paths, and it seems that changing it to handle
                # other directory layout is really hard. For that reason,
                # we teach V2 to do the things regression system requires.
                # The value of '<location-prefix>' is prepended to the path.
                prefix = self.get ('<location-prefix>')

                if prefix:
                    # Multiple <location-prefix> properties are ambiguous.
                    if len (prefix) > 1:
                        raise AlreadyDefined ("Two <location-prefix> properties specified: '%s'" % prefix)

                    computed = os.path.join(prefix[0], p)

                else:
                    computed = p

                # An empty path means the current directory.
                if not computed:
                    computed = "."

                is_relative = True

            self.target_path_ = (computed, is_relative)

        return self.target_path_
|
||||
|
||||
def add (self, ps):
|
||||
""" Creates a new property set containing the properties in this one,
|
||||
plus the ones of the property set passed as argument.
|
||||
"""
|
||||
if not self.added_.has_key (str (ps)):
|
||||
self.added_ [str (ps)] = create (self.raw_ + ps.raw ())
|
||||
return self.added_ [str (ps)]
|
||||
|
||||
def add_raw (self, properties):
|
||||
""" Creates a new property set containing the properties in this one,
|
||||
plus the ones passed as argument.
|
||||
"""
|
||||
return self.add (create (properties))
|
||||
|
||||
|
||||
def get (self, feature):
|
||||
""" Returns all values of 'feature'.
|
||||
"""
|
||||
if not self.feature_map_:
|
||||
self.feature_map_ = {}
|
||||
|
||||
for v in self.raw_:
|
||||
key = get_grist (v)
|
||||
if not self.feature_map_.has_key (key):
|
||||
self.feature_map_ [key] = []
|
||||
self.feature_map_ [get_grist (v)].append (replace_grist (v, ''))
|
||||
|
||||
return self.feature_map_.get (feature, [])
|
||||
|
||||
157
src/build/scanner.py
Normal file
157
src/build/scanner.py
Normal file
@@ -0,0 +1,157 @@
|
||||
# Status: ported.
|
||||
# Base revision: 45462
|
||||
#
|
||||
# Copyright 2003 Dave Abrahams
|
||||
# Copyright 2002, 2003, 2004, 2005 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# Implements scanners: objects that compute implicit dependencies for
|
||||
# files, such as includes in C++.
|
||||
#
|
||||
# Scanner has a regular expression used to find dependencies, some
|
||||
# data needed to interpret those dependencies (for example, include
|
||||
# paths), and code which actually establishes the needed relationships
|
||||
# between actual jam targets.
|
||||
#
|
||||
# Scanner objects are created by actions, when they try to actualize
|
||||
# virtual targets, passed to 'virtual-target.actualize' method and are
|
||||
# then associated with actual targets. It is possible to use
|
||||
# several scanners for a virtual-target. For example, a single source
|
||||
# might be used by two compile actions, with different include paths.
|
||||
# In this case, two different actual targets will be created, each
|
||||
# having scanner of its own.
|
||||
#
|
||||
# Typically, scanners are created from target type and action's
|
||||
# properties, using the rule 'get' in this module. Directly creating
|
||||
# scanners is not recommended, because it might create many equivalent
|
||||
# but different instances, and lead in unneeded duplication of
|
||||
# actual targets. However, actions can also create scanners in a special
|
||||
# way, instead of relying on just target type.
|
||||
|
||||
import property
|
||||
import bjam
|
||||
from b2.exceptions import *
|
||||
from b2.manager import get_manager
|
||||
|
||||
def reset ():
    """ Clear the module state. This is mainly for testing purposes.
    """
    global __scanners, __rv_cache, __scanner_cache

    # Maps registered scanner classes to relevant properties
    __scanners = {}

    # A cache of scanners.
    # The key is: class_name.properties_tag, where properties_tag is the concatenation
    # of all relevant properties, separated by '-'
    # NOTE(review): __rv_cache is declared global above but never assigned
    # anywhere visible -- confirm whether it is still used.
    __scanner_cache = {}
|
||||
|
||||
reset ()
|
||||
|
||||
|
||||
def register(scanner_class, relevant_properties):
    """Register a scanner class along with the properties relevant to it.

    The class's constructor must accept a single parameter: the list of
    relevant property values.
    """
    __scanners[str(scanner_class)] = relevant_properties
|
||||
|
||||
def registered(scanner_class):
    """Return True iff a scanner of the given class has been registered."""
    return str(scanner_class) in __scanners
|
||||
|
||||
def get(scanner_class, properties):
    """ Returns an instance of a previously registered scanner, with the
        specified properties.

        Instances are cached per (scanner class, relevant property values)
        combination.
    """
    scanner_name = str(scanner_class)

    if not registered(scanner_name):
        raise BaseException ("attempt to get unregisted scanner: %s" % scanner_name)

    relevant_properties = __scanners[scanner_name]
    r = property.select(relevant_properties, properties)

    scanner_id = scanner_name + '.' + '-'.join(r)

    # BUG FIX: the original keyed the cache on 'scanner_name' alone, so the
    # first instance created was returned for every subsequent property set,
    # ignoring differing relevant properties.  Key on the full id instead.
    if scanner_id not in __scanner_cache:
        __scanner_cache[scanner_id] = scanner_class(r)

    return __scanner_cache[scanner_id]
|
||||
|
||||
class Scanner:
    """Abstract base class for scanners.

    Subclasses must override both 'pattern' and 'process'.
    """

    def __init__ (self):
        pass

    def pattern (self):
        """Return the regular expression pattern used for scanning."""
        raise BaseException ("method must be overriden")

    def process (self, target, matches):
        """Establish the necessary relationships between targets, given the
        actual target being scanned and the list of pattern matches found
        in that file."""
        raise BaseException ("method must be overriden")
|
||||
|
||||
|
||||
# Common scanner class, which can be used when there's only one
|
||||
# kind of includes (unlike C, where "" and <> includes have different
|
||||
# search paths).
|
||||
class CommonScanner(Scanner):
    """Common scanner class, usable when there is only one kind of include
    (unlike C, where "" and <> includes have different search paths).

    BUG FIXES relative to the original:
      - was declared with 'def' instead of 'class', so it was a plain
        function and could not be instantiated or used as a Scanner;
      - stored the include paths as 'self.includes' but read
        'self.includes_' in process();
      - process() used an undefined name 'engine'.
    """

    def __init__ (self, includes):
        Scanner.__init__(self)
        # Stored under the name process() actually reads.
        self.includes_ = includes

    def process(self, target, matches, binding):
        import os.path  # 'os' is not imported at this module's top level

        target_path = os.path.normpath(os.path.dirname(binding[0]))
        bjam.call("mark-included", target, matches)

        engine = get_manager().engine()
        engine.set_target_variable(matches, "SEARCH",
                                   [target_path] + self.includes_)
        get_manager().scanners().propagate(self, matches)
|
||||
|
||||
class ScannerRegistry:
    """Tracks scanner instances and binds them to actual jam targets."""

    def __init__ (self, manager):
        self.manager_ = manager
        self.count_ = 0
        self.exported_scanners_ = {}

    def install (self, scanner, target, vtarget):
        """ Installs the specified scanner on actual target 'target'.
            vtarget: virtual target from which 'target' was actualized.
        """
        engine = self.manager_.engine()
        engine.set_target_variable(target, "HDRSCAN", scanner.pattern())

        # Export this scanner's 'process' method to bjam once, under a
        # unique rule name; reuse the name on subsequent installs.
        if scanner in self.exported_scanners_:
            exported_name = self.exported_scanners_[scanner]
        else:
            exported_name = "scanner_%d" % self.count_
            self.count_ += 1
            self.exported_scanners_[scanner] = exported_name
            bjam.import_rule("", exported_name, scanner.process)

        engine.set_target_variable(target, "HDRRULE", exported_name)

        # The scanner reflects differences in properties affecting the
        # binding of 'target', which will be known when processing includes
        # for it, and will give information on how to interpret quoted
        # includes.
        engine.set_target_variable(target, "HDRGRIST", str(id(scanner)))

    def propagate(self, scanner, targets):
        """Bind 'scanner' to each of the (already actualized) 'targets'."""
        engine = self.manager_.engine()
        engine.set_target_variable(targets, "HDRSCAN", scanner.pattern())
        engine.set_target_variable(targets, "HDRRULE",
                                   self.exported_scanners_[scanner])
        engine.set_target_variable(targets, "HDRGRIST", str(id(scanner)))
|
||||
|
||||
1264
src/build/targets.py
Normal file
1264
src/build/targets.py
Normal file
File diff suppressed because it is too large
Load Diff
402
src/build/toolset.py
Normal file
402
src/build/toolset.py
Normal file
@@ -0,0 +1,402 @@
|
||||
# Status: being ported by Vladimir Prus
|
||||
# Base revision: 40958
|
||||
#
|
||||
# Copyright 2003 Dave Abrahams
|
||||
# Copyright 2005 Rene Rivera
|
||||
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
""" Support for toolset definition.
|
||||
"""
|
||||
|
||||
import feature, property, generators
|
||||
from b2.util.utility import *
|
||||
from b2.util import set
|
||||
|
||||
# NOTE(review): 're' is not imported explicitly in this module; it presumably
# arrives via 'from b2.util.utility import *' -- confirm.
__re_split_last_segment = re.compile (r'^(.+)\.([^\.])*')
# NOTE(review): in the pattern above the '*' sits outside the group, so
# group(2) captures only the last character of the final segment; only
# group(1) (everything before the last dot) is used, so behavior is
# unaffected -- confirm before relying on group(2).
__re_two_ampersands = re.compile ('(&&)')
__re_first_segment = re.compile ('([^.]*).*')   # first dot-separated segment -> group(1)
__re_first_group = re.compile (r'[^.]*\.(.*)')  # everything after the first dot -> group(1)
|
||||
|
||||
# Flag is a mechanism to set a value
|
||||
# A single toolset flag. Specifies that when certain
|
||||
# properties are in build property set, certain values
|
||||
# should be appended to some variable.
|
||||
#
|
||||
# A flag applies to a specific action in specific module.
|
||||
# The list of all flags for a module is stored, and each
|
||||
# flag further contains the name of the rule it applies
|
||||
# for,
|
||||
class Flag:
    """A single toolset flag definition.

    Records that, when 'condition' (a set of property sets) is satisfied by
    the build properties, 'values' should be appended to the variable named
    'variable_name'.  'rule' names the specific rule the flag applies to,
    or is None when the flag applies to a whole module.
    """

    def __init__(self, variable_name, values, condition, rule = None):
        self.variable_name = variable_name
        self.values = values
        self.condition = condition
        self.rule = rule

    def __str__(self):
        return "Flag(%s, %s, %s, %s)" % (self.variable_name, self.values,
                                         self.condition, self.rule)
|
||||
|
||||
def reset ():
    """ Clear the module state. This is mainly for testing purposes.
    """
    global __module_flags, __flags, __stv, __requirements

    # Mapping from module name to a list of all flags that apply
    # to either that module directly, or to any rule in that module.
    # Each element of the list is a Flag instance.
    # So, for module named xxx this might contain flags for 'xxx',
    # for 'xxx.compile', for 'xxx.compile.c++', etc.
    __module_flags = {}

    # Mapping from specific rule or module name to a list of Flag instances
    # that apply to that name.
    # Say, it might contain flags for 'xxx.compile.c++'. If there are
    # entries for module name 'xxx', they are flags for 'xxx' itself,
    # not including any rules in that module.
    __flags = {}

    # A cache for variable settings. The key is generated from the rule
    # name and the properties.
    __stv = {}

    # Global 'toolset requirements', read by requirements() and extended by
    # add_requirements().  BUG FIX: the original never initialized this
    # anywhere, so those functions raised NameError on first use.
    __requirements = []
|
||||
|
||||
reset ()
|
||||
|
||||
# FIXME: --ignore-toolset-requirements
|
||||
# FIXME: using
|
||||
|
||||
def normalize_condition (property_sets):
    """Expand subfeatures in each element of 'property_sets'.

    For example, '<toolset>gcc-3.2' is converted to
    '<toolset>gcc/<toolset-version>3.2'.

    TODO: does this one belong here or in feature?
    """
    return ['/'.join (feature.expand_subfeatures (feature.split (ps)))
            for ps in property_sets]
|
||||
|
||||
# FIXME push-checking-for-flags-module ....
|
||||
# FIXME: investigate existing uses of 'hack-hack' parameter
|
||||
# in jam code.
|
||||
|
||||
def flags (rule_or_module, variable_name, condition, values = None):
    """ Specifies the flags (variables) that must be set on targets under
        certain conditions, described by the arguments.

        rule_or_module: If it contains a dot, the name of a rule; the flags
                        will be applied when that rule is used to set up
                        build actions.  Otherwise a module name, and the
                        flags apply to all rules in that module.  If the
                        module for a rule differs from the calling module,
                        an error is issued.

        variable_name:  Variable that should be set on targets.

        condition:      A set of property sets; if one of them is contained
                        in the build properties, the flag will be used.
                        Implied values are not allowed: use '<toolset>gcc',
                        not just 'gcc'.  Subfeatures, as in
                        '<toolset>gcc-3.2', are allowed.  If left empty,
                        the flag is always used.

                        Property sets may use value-less properties ('<a>'
                        vs. '<a>value') to match absent properties.  This
                        allows separately matching
                            <architecture>/<address-model>64
                            <architecture>ia64/<address-model>
                        where both features are optional, without having to
                        define a "default" value.

        values:         The values to add to the variable.  If a
                        '<feature>' is specified, the value of that feature
                        will be added.  Defaults to no values.
    """
    # BUG FIX: 'values = []' as a default argument is a single shared list;
    # the compatibility branch below rebinds it, but using None keeps the
    # signature safe against future in-place mutation.
    if values is None:
        values = []

    if condition and not replace_grist (condition, ''):
        # We have a condition in the form '<feature>', that is, without a
        # value.  That is the previous syntax:
        #
        #   flags gcc.link RPATH <dll-path> ;
        #
        # For compatibility, convert it to:
        #
        #   flags gcc.link RPATH : <dll-path> ;
        values = [ condition ]
        condition = None

    if condition:
        property.validate_property_sets (condition)
        condition = normalize_condition ([condition])

    __add_flag (rule_or_module, variable_name, condition, values)
|
||||
|
||||
def set_target_variables (manager, rule_or_module, targets, properties):
    """Compute the flag variables implied by 'rule_or_module' and
    'properties' (memoized per key), and set them on every target in
    'targets'."""
    key = rule_or_module + '.' + str (properties)
    settings = __stv.get (key, None)
    if not settings:
        settings = __set_target_variables_aux (manager, rule_or_module, properties)
        __stv [key] = settings

    for name, value in settings:
        for target in targets:
            manager.engine ().set_target_variable (target, name, value, True)
|
||||
|
||||
def find_property_subset (property_sets, properties):
    """Return the first element of 'property_sets' which is a subset of
    'properties', or None if no such element exists."""

    prop_keys = get_grist(properties)

    for candidate in property_sets:
        # Handle value-less properties like '<architecture>' (compare with
        # '<architecture>x86').  NOTE: local renamed from 'set', which
        # shadowed the b2.util.set module imported above.
        candidate_props = feature.split(candidate)

        # Features that have no value in the candidate property set and are
        # omitted from the build properties match by default.  For example
        # (first column is the candidate property set):
        #
        #   <a> <b>foo        <b>foo          match
        #   <a> <b>foo        <a>foo <b>foo   no match
        #   <a>foo <b>foo     <b>foo          no match
        #   <a>foo <b>foo     <a>foo <b>foo   match
        default_props = [p for p in candidate_props
                         if not (get_value(p) or get_grist(p) in prop_keys)]

        acceptable = properties + default_props
        if all(p in acceptable for p in candidate_props):
            return candidate

    return None
|
||||
|
||||
|
||||
def register (toolset):
    """Register a new toolset, making it a valid value of the 'toolset'
    feature."""
    feature.extend ('toolset', [toolset])
|
||||
|
||||
def inherit_generators (toolset, properties, base, generators_to_ignore = None):
    """ Clones all generators from toolset 'base' into 'toolset', adjusting
        the generator ids.  Generators whose ids appear in
        'generators_to_ignore' are skipped.  If 'properties' is empty,
        '<toolset>{toolset}' is used.
    """
    # BUG FIX: mutable default argument replaced with None.
    if generators_to_ignore is None:
        generators_to_ignore = []

    if not properties:
        properties = [replace_grist (toolset, '<toolset>')]

    base_generators = generators.generators_for_toolset(base)

    for g in base_generators:
        generator_id = g.id()

        if not generator_id in generators_to_ignore:
            # Some generator names have multiple periods in their name, so
            # $(id:B=$(toolset)) doesn't generate the right new_id name.
            # e.g. if id = gcc.compile.c++, $(id:B=darwin) = darwin.c++,
            # which is not what we want. Manually parse the base and suffix
            # (if there's a better way to do this, I'd love to see it.)
            # See also register in module generators.
            # BUG FIX: the original unpacked into 'base', clobbering the
            # parameter of the same name after the first iteration.
            (unused_base, suffix) = split_action_id(generator_id)

            new_id = toolset + '.' + suffix

            generators.register(g.clone(new_id, properties))
|
||||
|
||||
def inherit_flags(toolset, base, prohibited_properties = None):
    """Brings all flag definitions from the 'base' toolset into the 'toolset'
    toolset. Flag definitions whose conditions make use of properties in
    'prohibited_properties' are ignored. Don't confuse property and feature;
    for example, blocking <debug-symbols>on does not block
    <debug-symbols>off.

    The flag conditions are not altered at all, so if a condition includes a
    name or version of a base toolset, it won't ever match the inheriting
    toolset. When such flag settings must be inherited, define a rule in the
    base toolset module and call it as needed."""
    # BUG FIX: mutable default argument replaced with None.
    if prohibited_properties is None:
        prohibited_properties = []

    for f in __module_flags.get(base, []):

        if not f.condition or set.difference(f.condition, prohibited_properties):
            match = __re_first_group.match(f.rule)
            rule_ = None
            if match:
                rule_ = match.group(1)

            if rule_:
                new_rule_or_module = toolset + '.' + rule_
            else:
                new_rule_or_module = toolset

            __add_flag (new_rule_or_module, f.variable_name, f.condition, f.values)
|
||||
|
||||
def inherit_rules (toolset, base):
    # Intentionally a no-op: rule inheritance is not yet implemented in the
    # Python port.  The jam logic this should eventually mirror is preserved
    # in the commented-out block below.
    pass
|
||||
# FIXME: do something about this.
|
||||
# base_generators = generators.generators_for_toolset (base)
|
||||
|
||||
# import action
|
||||
|
||||
# ids = []
|
||||
# for g in base_generators:
|
||||
# (old_toolset, id) = split_action_id (g.id ())
|
||||
# ids.append (id) ;
|
||||
|
||||
# new_actions = []
|
||||
|
||||
# engine = get_manager().engine()
|
||||
# FIXME: do this!
|
||||
# for action in engine.action.values():
|
||||
# pass
|
||||
# (old_toolset, id) = split_action_id(action.action_name)
|
||||
#
|
||||
# if old_toolset == base:
|
||||
# new_actions.append ((id, value [0], value [1]))
|
||||
#
|
||||
# for a in new_actions:
|
||||
# action.register (toolset + '.' + a [0], a [1], a [2])
|
||||
|
||||
# TODO: how to deal with this?
|
||||
# IMPORT $(base) : $(rules) : $(toolset) : $(rules) : localized ;
|
||||
# # Import the rules to the global scope
|
||||
# IMPORT $(toolset) : $(rules) : : $(toolset).$(rules) ;
|
||||
# }
|
||||
#
|
||||
|
||||
######################################################################################
|
||||
# Private functions
|
||||
|
||||
def __set_target_variables_aux (manager, rule_or_module, properties):
    """ Given a rule name and a property set, returns a list of
        (variable name, value) tuples that must be set on targets for that
        rule/properties combination.
    """
    result = []

    for flag in __flags.get(rule_or_module, []):
        if flag.condition and not find_property_subset (flag.condition, properties):
            continue

        for value in flag.values:
            # The value might be '<feature-name>' and so needs special
            # treatment.
            for expanded in __handle_flag_value (manager, value, properties):
                result.append ((flag.variable_name, expanded))

    # Strip away the last dot-separated segment and recurse, so flags set
    # on enclosing modules also apply.
    enclosing = __re_split_last_segment.match(rule_or_module)
    if enclosing:
        result.extend(__set_target_variables_aux(
            manager, enclosing.group(1), properties))

    return result
|
||||
|
||||
def __handle_flag_value (manager, value, properties):
    """ Expands a single flag value against 'properties'.

        If 'value' is a feature reference ('<feature>'), the matching
        property values are returned: dependency features are actualized,
        path/free features are split on '&&', and other features are
        returned ungristed.  Otherwise 'value' itself is returned as a
        single-element list.
    """
    result = []

    if get_grist (value):
        matches = property.select (value, properties)
        for p in matches:
            att = feature.attributes (get_grist (p))

            ungristed = replace_grist (p, '')

            if 'dependency' in att:
                # The value of a dependency feature is a target
                # and must be actualized.
                # FIXME: verify that 'find' actually works, ick!
                result.append (manager.targets ().find (ungristed).actualize ())

            elif 'path' in att or 'free' in att:
                values = []

                # Treat features with && in the value specially -- each
                # &&-separated element is considered a separate value.  This
                # is needed to handle searched libraries, which must be in a
                # specific order.
                if not __re_two_ampersands.search (ungristed):
                    values.append (ungristed)

                else:
                    # BUG FIX: the original split 'value' (the gristed flag
                    # specification) instead of the matched property value,
                    # which leaked the grist into the first element.
                    values.extend (ungristed.split ('&&'))

                result.extend (values)
            else:
                result.append (ungristed)
    else:
        result.append (value)

    return result
|
||||
|
||||
def __add_flag (rule_or_module, variable_name, condition, values):
    """ Adds a new flag setting with the specified values.
        Does no checking.
    """
    f = Flag(variable_name, values, condition, rule_or_module)

    # Grab the name of the module the rule belongs to.
    m = __re_first_segment.match (rule_or_module)
    assert m
    module = m.group(1)

    # BUG FIX: the original keyed __module_flags on the regex match object
    # 'm' rather than the module name, so lookups by name (e.g. in
    # inherit_flags) never found any flags.
    __module_flags.setdefault(module, []).append(f)
    __flags.setdefault(rule_or_module, []).append(f)
|
||||
|
||||
def requirements():
    """Return the list of global 'toolset requirements'.

    These requirements are automatically added to the requirements of every
    main target."""
    return __requirements
|
||||
|
||||
def add_requirements(requirements):
    """Add elements to the list of global 'toolset requirements'.

    The requirements are automatically added to the requirements of all main
    targets, as if specified literally.  For best results, every requirement
    added should be conditional or indirect conditional."""
    # FIXME: honour the jam-side '--ignore-toolset-requirements' option
    # (.ignore-requirements) before extending, as the original jam code did.
    __requirements.extend(requirements)
|
||||
#}
|
||||
|
||||
# Make toolset 'toolset', defined in a module of the same name,
|
||||
# inherit from 'base'
|
||||
# 1. The 'init' rule from 'base' is imported into 'toolset' with full
|
||||
# name. Another 'init' is called, which forwards to the base one.
|
||||
# 2. All generators from 'base' are cloned. The ids are adjusted and
|
||||
# <toolset> property in requires is adjusted too
|
||||
# 3. All flags are inherited
|
||||
# 4. All rules are imported.
|
||||
def inherit(toolset, base):
    """Make toolset 'toolset', defined in a module of the same name,
    inherit generators, flags and rules from toolset 'base'."""
    get_manager().projects().load_module(base, [])

    inherit_generators(toolset, [], base)
    inherit_flags(toolset, base)
    inherit_rules(toolset, base)
|
||||
292
src/build/type.py
Normal file
292
src/build/type.py
Normal file
@@ -0,0 +1,292 @@
|
||||
# Status: ported.
|
||||
# Base revision: 45462.
|
||||
|
||||
# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
|
||||
|
||||
import re
|
||||
import os
|
||||
import os.path
|
||||
from b2.util.utility import replace_grist, os_name
|
||||
from b2.exceptions import *
|
||||
from b2.build import feature, property, scanner
|
||||
|
||||
__re_hyphen = re.compile ('-')
|
||||
|
||||
def __register_features ():
    """ Register features needed by this module.
    """
    # The features are optional so that they are never implicitly added.
    # They are used only for internal purposes, and in all cases we
    # want to use them explicitly.
    feature.feature ('target-type', [], ['composite', 'optional'])
    feature.feature ('main-target-type', [], ['optional', 'incidental'])
    feature.feature ('base-target-type', [], ['composite', 'optional', 'free'])
|
||||
|
||||
def reset ():
    """ Clear the module state. This is mainly for testing purposes.
        Note that this must be called _after_ resetting the module 'feature'.
    """
    global __prefixes_suffixes, __suffixes_to_types, __types, __rule_names_to_types, __target_suffixes_cache

    __register_features ()

    # Stores prefixes and suffixes for generated targets.
    __prefixes_suffixes = [property.PropertyMap(), property.PropertyMap()]

    # Maps suffixes to types.
    __suffixes_to_types = {}

    # A map with all the registered types, indexed by the type name.
    # Each entry is a dictionary with the following values:
    #   'base':    the name of the base type, or None if the type has none
    #   'derived': a list of names of types which derive from this one
    #   'scanner': the scanner class registered for this type, if any
    __types = {}

    # BUG FIX: this global was declared above but never assigned, which
    # would raise NameError on its first use elsewhere in the module.
    __rule_names_to_types = {}

    # Caches suffixes for targets with certain properties.
    __target_suffixes_cache = {}
|
||||
|
||||
reset ()
|
||||
|
||||
|
||||
def register (type, suffixes = None, base_type = None):
    """ Registers a target type, possibly derived from 'base_type'.

        If 'suffixes' is provided, it lists all the suffixes that mean a
        file is of 'type'; the first element also gives the suffix used
        when constructing an object of 'type'.

        type: a string
        suffixes: None or a sequence of strings
        base_type: None or a string
    """
    # BUG FIX: mutable default argument 'suffixes = []' replaced with None.
    if suffixes is None:
        suffixes = []

    # Type names cannot contain hyphens, because when used as
    # feature-values they would be interpreted as composite features
    # which need to be decomposed.
    if __re_hyphen.search (type):
        raise BaseException ('type name "%s" contains a hyphen' % type)

    if type in __types:
        raise BaseException ('Type "%s" is already registered.' % type)

    entry = {}
    entry ['base'] = base_type
    entry ['derived'] = []
    entry ['scanner'] = None
    __types [type] = entry

    if base_type:
        __types [base_type]['derived'].append (type)

    if len (suffixes) > 0:
        # Generated targets of 'type' will use the first of 'suffixes'
        # (this may be overridden).
        set_generated_target_suffix (type, [], suffixes [0])

        # Specify the mapping from suffixes back to this type.
        register_suffixes (suffixes, type)

    feature.extend('target-type', [type])
    feature.extend('main-target-type', [type])
    feature.extend('base-target-type', [type])

    if base_type:
        feature.compose ('<target-type>' + type, replace_grist (base_type, '<base-target-type>'))
        feature.compose ('<base-target-type>' + type, '<base-target-type>' + base_type)

    # FIXME: resolving recursive dependency.
    from b2.manager import get_manager
    get_manager().projects().project_rules().add_rule_for_type(type)
|
||||
|
||||
def register_suffixes (suffixes, type):
    """ Specifies that files with any suffix from 'suffixes' have the
        target type 'type'.  If a different type is already registered for
        any of the suffixes, an error is raised.
    """
    for suffix in suffixes:
        if suffix in __suffixes_to_types:
            old_type = __suffixes_to_types [suffix]
            if old_type != type:
                raise BaseException ('Attempting to specify type for suffix "%s"\nOld type: "%s", New type "%s"' % (suffix, old_type, type))
        else:
            __suffixes_to_types [suffix] = type
|
||||
|
||||
def registered (type):
    """Return True iff the target type 'type' has been registered."""
    return type in __types
|
||||
|
||||
def validate (type):
    """Raise an error if 'type' is not a registered target type."""
    if registered (type):
        return
    raise BaseException ("Unknown target type '%s'" % type)
|
||||
|
||||
def set_scanner (type, scanner):
    """Associate the scanner class 'scanner' with target type 'type'."""
    validate (type)
    __types [type]['scanner'] = scanner
|
||||
|
||||
def get_scanner (type, prop_set):
    """Return a scanner instance appropriate to 'type' and 'prop_set', or
    None when the type is unknown or has no scanner registered."""
    if not registered (type):
        return None

    scanner_type = __types [type]['scanner']
    if not scanner_type:
        return None

    return scanner.get (scanner_type, prop_set.raw ())
|
||||
|
||||
def all_bases (type):
    """Return 'type' followed by all of its bases, nearest first."""
    chain = []
    current = type
    while current:
        chain.append (current)
        current = __types [current]['base']
    return chain
|
||||
|
||||
def all_derived (type):
    """Return 'type' and every type deriving from it, directly or
    indirectly, nearest first."""
    result = [type]
    for child in __types [type]['derived']:
        result += all_derived (child)
    return result
|
||||
|
||||
def is_derived (type, base):
    """Return True iff 'type' is 'base' or has 'base' as a direct or
    indirect base."""
    # TODO: walking the whole base chain is not very efficient, especially
    # for bases close to 'type'.
    return base in all_bases (type)
|
||||
|
||||
def is_subtype (type, base):
    """Deprecated alias for is_derived; should eventually be removed."""
    # TODO: remove this method
    return is_derived (type, base)
|
||||
|
||||
def set_generated_target_suffix (type, properties, suffix):
    """ Sets a target suffix that should be used when generating target
        of 'type' with the specified properties. Can be called with
        empty properties if no suffix for 'type' was specified yet.
        This does not automatically specify that files 'suffix' have
        'type' --- two different types can use the same suffix for
        generating, but only one type should be auto-detected for
        a file with that suffix. User should explicitly specify which
        one.

        The 'suffix' parameter can be empty string ("") to indicate that
        no suffix should be used.
    """
    # First argument 1 selects the suffix (as opposed to prefix) table.
    set_generated_target_ps(1, type, properties, suffix)
|
||||
|
||||
|
||||
|
||||
def change_generated_target_suffix (type, properties, suffix):
    """ Change the suffix previously registered for this type/properties
        combination. If suffix is not yet specified, sets it.
    """
    # First argument 1 selects the suffix (as opposed to prefix) table.
    change_generated_target_ps(1, type, properties, suffix)
|
||||
|
||||
def generated_target_suffix(type, properties):
    """Return the suffix to use when generating a target of 'type' with
    the given properties; falls back to base types (see generated_target_ps)."""
    return generated_target_ps(1, type, properties)
|
||||
|
||||
# Sets a target prefix that should be used when generating targets of 'type'
|
||||
# with the specified properties. Can be called with empty properties if no
|
||||
# prefix for 'type' has been specified yet.
|
||||
#
|
||||
# The 'prefix' parameter can be empty string ("") to indicate that no prefix
|
||||
# should be used.
|
||||
#
|
||||
# Usage example: library names use the "lib" prefix on unix.
|
||||
def set_generated_target_prefix(type, properties, prefix):
    """Set the prefix used when generating targets of 'type' with the
    specified properties (e.g. the "lib" prefix for libraries on unix);
    '' means no prefix. See the comment block above for details."""
    # First argument 0 selects the prefix (as opposed to suffix) table.
    set_generated_target_ps(0, type, properties, prefix)
|
||||
|
||||
# Change the prefix previously registered for this type/properties combination.
|
||||
# If prefix is not yet specified, sets it.
|
||||
def change_generated_target_prefix(type, properties, prefix):
    """Change the prefix previously registered for this type/properties
    combination; if no prefix is registered yet, sets it."""
    # First argument 0 selects the prefix (as opposed to suffix) table.
    change_generated_target_ps(0, type, properties, prefix)
|
||||
|
||||
def generated_target_prefix(type, properties):
    """Return the prefix to use when generating a target of 'type' with
    the given properties; falls back to base types (see generated_target_ps)."""
    return generated_target_ps(0, type, properties)
|
||||
|
||||
def set_generated_target_ps(is_suffix, type, properties, val):
    """Record prefix/suffix 'val' for targets of 'type' under 'properties'.

    'is_suffix' selects the suffix table (1) or the prefix table (0).
    """
    # Build the key as a new list instead of appending to 'properties':
    # the old code mutated the caller's list, which also made
    # change_generated_target_ps add the <target-type> property twice
    # on its fallback path.
    __prefixes_suffixes[is_suffix].insert (properties + ['<target-type>' + type], val)
|
||||
|
||||
def change_generated_target_ps(is_suffix, type, properties, val):
    """Replace the prefix/suffix previously registered for this
    type/properties combination; if none was registered, set it.

    'is_suffix' selects the suffix table (1) or the prefix table (0).
    """
    # Build the lookup key without mutating the caller's list.
    key = properties + ['<target-type>' + type]
    prev = __prefixes_suffixes[is_suffix].find_replace(key, val)
    if not prev:
        # Pass the ORIGINAL properties: set_generated_target_ps adds the
        # <target-type> property itself. The old code passed the already
        # augmented list, registering <target-type> twice.
        set_generated_target_ps(is_suffix, type, properties, val)
|
||||
|
||||
# Returns either prefix or suffix (as indicated by 'is_suffix') that should be used
|
||||
# when generating a target of 'type' with the specified properties.
|
||||
# If no prefix/suffix is specified for 'type', returns prefix/suffix for
|
||||
# base type, if any.
|
||||
def generated_target_ps_real(is_suffix, type, properties):
    """Return the prefix/suffix (selected by 'is_suffix') registered for
    'type' with 'properties', walking up the base-type chain until an
    entry is found. Returns '' when nothing is registered.
    """
    result = None
    while type:
        result = __prefixes_suffixes[is_suffix].find (['<target-type>' + type] + properties)

        # An explicitly registered empty string ("") is a valid value
        # meaning "no prefix/suffix" and must stop the search. The old
        # truthiness test ('if result:') skipped over it, contradicting
        # the original comment's stated contract.
        # NOTE(review): assumes the property map's find() returns None
        # (not '') when no entry exists -- confirm against its source.
        if result is not None:
            break

        type = __types [type]['base']

    if result is None:
        result = ''
    return result
|
||||
|
||||
def generated_target_ps(is_suffix, type, prop_set):
    """Memoized front end for generated_target_ps_real.

    Returns the suffix (is_suffix=1) or prefix (is_suffix=0) that should
    be used when generating a target of 'type' with properties
    'prop_set'; falls back to base types.
    """
    # A tuple key avoids accidental collisions that string concatenation
    # ('1' + type + str(...)) could produce.
    key = (is_suffix, type, str(prop_set))
    v = __target_suffixes_cache.get (key, None)

    if v is None:
        # Explicit None check: a cached '' (meaning "no suffix") is a
        # valid entry and must not be recomputed on every call, which is
        # what the old 'if not v:' test did.
        v = generated_target_ps_real(is_suffix, type, prop_set.raw())
        __target_suffixes_cache [key] = v

    return v
|
||||
|
||||
def type(filename):
    """Return the registered file type for 'filename', or None.

    If there are several dots in the name, each suffix is tried,
    outermost first: for "file.so.1.2" the suffixes "2", "1" and "so"
    are tried in that order. (The function name shadows the builtin
    'type' but is the module's established public API.)
    """
    while 1:
        filename, suffix = os.path.splitext (filename)
        if not suffix:
            return None
        # Drop the leading dot produced by splitext.
        suffix = suffix[1:]

        # 'in' instead of the deprecated dict.has_key().
        if suffix in __suffixes_to_types:
            return __suffixes_to_types[suffix]
|
||||
|
||||
# NOTE: moved from tools/types/register
|
||||
# NOTE: moved from tools/types/register
def register_type (type, suffixes, base_type = None, os = None):
    """ Register the given type on the specified OSes, or on remaining OSes
        if 'os' is not specified. This rule is injected into each of the type
        modules for the sake of convenience.

        'os' defaults to None (meaning "all OSes") instead of a mutable
        default list; passing [] behaves identically.
    """
    if registered (type):
        return

    # 'not os' covers both None and [] -- register everywhere; otherwise
    # only when the current OS is in the given list.
    if not os or os_name () in os:
        register (type, suffixes, base_type)
|
||||
1051
src/build/virtual_target.py
Normal file
1051
src/build/virtual_target.py
Normal file
File diff suppressed because it is too large
Load Diff
437
src/build_system.py
Normal file
437
src/build_system.py
Normal file
@@ -0,0 +1,437 @@
|
||||
# Status: being ported by Vladimir Prus.
|
||||
|
||||
# Copyright 2003, 2005 Dave Abrahams
|
||||
# Copyright 2006 Rene Rivera
|
||||
# Copyright 2003, 2004, 2005, 2006, 2007 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
from b2.build.engine import Engine
|
||||
from b2.manager import Manager
|
||||
from b2.util.path import glob
|
||||
from b2.build import feature, property_set
|
||||
import b2.build.virtual_target
|
||||
from b2.build.targets import ProjectTarget
|
||||
from b2.util.sequence import unique
|
||||
import b2.build.build_request
|
||||
from b2.build.errors import ExceptionWithUserContext
|
||||
import b2.tools.common
|
||||
|
||||
import bjam
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
# FIXME:
|
||||
# Returns the location of the build system. The primary use case
|
||||
# is building Boost, where it's sometimes needed to get location
|
||||
# of other components (like BoostBook files), and it's convenient
|
||||
# to use location relatively to Boost.Build path.
|
||||
#rule location ( )
|
||||
#{
|
||||
# local r = [ modules.binding build-system ] ;
|
||||
# return $(r:P) ;
|
||||
#}
|
||||
|
||||
# FIXME:
|
||||
|
||||
def get_boolean_option(name):
    """Return 1 if "--<name>" appears on the command line (module-global
    'argv'), 0 otherwise."""
    return 1 if ("--" + name) in argv else 0
|
||||
|
||||
def get_string_option(name):
    """Return the value of a "--<name>=<value>" command line option
    (module-global 'argv'), or None if it is absent."""
    prefix = "--" + name + "="

    for candidate in argv:
        if candidate.startswith(prefix):
            return candidate[len(prefix):]
    return None
|
||||
|
||||
def home_directories():
    """Return the candidate home directories used for config lookup.

    On Windows several environment conventions exist
    (HOMEDRIVE+HOMEPATH, HOME, USERPROFILE); every one that is actually
    set is collected (de-duplicated). On POSIX only $HOME is used.
    """
    if os.name == "nt":
        result = set()
        # Check each variable independently: the old single try/except
        # block meant that a missing HOME also skipped USERPROFILE.
        if 'HOMEDRIVE' in os.environ and 'HOMEPATH' in os.environ:
            result.add(os.environ['HOMEDRIVE'] + os.environ['HOMEPATH'])
        if 'HOME' in os.environ:
            result.add(os.environ['HOME'])
        if 'USERPROFILE' in os.environ:
            result.add(os.environ['USERPROFILE'])
        return list(result)
    else:
        return [os.environ['HOME']]
|
||||
|
||||
ignore_config = 0
|
||||
debug_config = 0
|
||||
|
||||
def load_config(manager, basename, path):
    """Unless ignore-config is set, search configuration
    basename.jam in path and loads it. The jamfile module
    for that file will be loaded 'basename'."""

    if not ignore_config:
        found = glob(path, [basename + ".jam"])
        if found:
            # glob returns a list; only the first match is used.
            found = found[0]
        if debug_config:
            print "notice: searching '%s' for '%s.jam'" % (path, basename)
            if found:
                print "notice: loading %s.jam from %s" % (basename, found)

        # NOTE(review): this is executed even when nothing was found,
        # passing an empty list as the file -- confirm load_standalone
        # tolerates that, or guard with 'if found:'.
        manager.projects().load_standalone(basename, found)
|
||||
|
||||
def main():
    """Build-system entry point: captures the bjam command line into the
    module-global 'argv' and runs main_real(), optionally under cProfile
    when --profiling is given."""

    global argv
    argv = bjam.variable("ARGV")

    # FIXME: document this option.
    if "--profiling" in argv:
        import cProfile
        import pstats
        cProfile.runctx('main_real()', globals(), locals(), "stones.prof")

        stats = pstats.Stats("stones.prof")
        stats.strip_dirs()
        stats.sort_stats('time', 'calls')
        # Report the 20 most expensive callers.
        stats.print_callers(20)
    else:
        main_real()
|
||||
|
||||
def main_real():
|
||||
|
||||
global ignore_config
|
||||
global debug_config
|
||||
|
||||
boost_build_path = bjam.variable("BOOST_BUILD_PATH")
|
||||
|
||||
engine = Engine()
|
||||
|
||||
global_build_dir = get_string_option("build-dir")
|
||||
debug_config = get_boolean_option("debug-configuration")
|
||||
|
||||
manager = Manager(engine, global_build_dir)
|
||||
|
||||
# This module defines types and generator and what not,
|
||||
# and depends on manager's existence
|
||||
import b2.tools.builtin
|
||||
|
||||
|
||||
# Check if we can load 'test-config.jam'. If we can, load it and
|
||||
# ignore user configs.
|
||||
|
||||
test_config = glob(boost_build_path, ["test-config.jam"])
|
||||
if test_config:
|
||||
test_config = test_config[0]
|
||||
|
||||
if test_config:
|
||||
if debug_config:
|
||||
print "notice: loading testing-config.jam from '%s'" % test_config
|
||||
print "notice: user-config.jam and site-config.jam will be ignored"
|
||||
|
||||
manager.projects().load_standalone("test-config", test_config)
|
||||
|
||||
|
||||
ignore_config = test_config or get_boolean_option("ignore-config")
|
||||
user_path = home_directories() + boost_build_path
|
||||
|
||||
site_path = ["/etc"] + user_path
|
||||
if bjam.variable("OS") in ["NT", "CYGWIN"]:
|
||||
site_path = [os.environ("SystemRoot")] + user_path
|
||||
|
||||
load_config(manager, "site-config", site_path)
|
||||
|
||||
user_config_path = get_string_option("user-config")
|
||||
if not user_config_path:
|
||||
user_config_path = os.environ.get("BOOST_BUILD_USER_CONFIG")
|
||||
|
||||
if user_config_path:
|
||||
if debug_config:
|
||||
print "Loading explicitly specifier user configuration file:"
|
||||
print " %s" % user_config_path
|
||||
|
||||
manager.projects().load_standalone("user-config", user_config_path)
|
||||
|
||||
else:
|
||||
load_config(manager, "user-config", user_path)
|
||||
|
||||
|
||||
# FIXME:
|
||||
## #
|
||||
## # Autoconfigure toolsets based on any instances of --toolset=xx,yy,...zz or
|
||||
## # toolset=xx,yy,...zz in the command line
|
||||
## #
|
||||
## local option-toolsets = [ regex.split-list [ MATCH ^--toolset=(.*) : $(argv) ] : "," ] ;
|
||||
## local feature-toolsets = [ regex.split-list [ MATCH ^toolset=(.*) : $(argv) ] : "," ] ;
|
||||
|
||||
## # if the user specified --toolset=..., we need to add toolset=... to
|
||||
## # the build request
|
||||
## local extra-build-request ;
|
||||
|
||||
extra_build_request = []
|
||||
|
||||
## if ! $(ignore-config)
|
||||
## {
|
||||
## for local t in $(option-toolsets) $(feature-toolsets)
|
||||
## {
|
||||
## # Parse toolset-version/properties
|
||||
## local (t-v,t,v) = [ MATCH (([^-/]+)-?([^/]+)?)/?.* : $(t) ] ;
|
||||
## local toolset-version = $((t-v,t,v)[1]) ;
|
||||
## local toolset = $((t-v,t,v)[2]) ;
|
||||
## local version = $((t-v,t,v)[3]) ;
|
||||
|
||||
## if $(debug-config)
|
||||
## {
|
||||
## ECHO notice: [cmdline-cfg] Detected command-line request for
|
||||
## $(toolset-version): toolset= \"$(toolset)\" "version= \""$(version)\" ;
|
||||
## }
|
||||
|
||||
## local known ;
|
||||
|
||||
## # if the toolset isn't known, configure it now.
|
||||
## if $(toolset) in [ feature.values <toolset> ]
|
||||
## {
|
||||
## known = true ;
|
||||
## }
|
||||
|
||||
## if $(known) && $(version)
|
||||
## && ! [ feature.is-subvalue toolset : $(toolset) : version : $(version) ]
|
||||
## {
|
||||
## known = ;
|
||||
## }
|
||||
|
||||
## if ! $(known)
|
||||
## {
|
||||
## if $(debug-config)
|
||||
## {
|
||||
## ECHO notice: [cmdline-cfg] toolset $(toolset-version)
|
||||
## not previously configured; configuring now ;
|
||||
## }
|
||||
## toolset.using $(toolset) : $(version) ;
|
||||
## }
|
||||
## else
|
||||
## {
|
||||
## if $(debug-config)
|
||||
## {
|
||||
## ECHO notice: [cmdline-cfg] toolset $(toolset-version) already configured ;
|
||||
## }
|
||||
## }
|
||||
|
||||
## # make sure we get an appropriate property into the build request in
|
||||
## # case the user used the "--toolset=..." form
|
||||
## if ! $(t) in $(argv)
|
||||
## && ! $(t) in $(feature-toolsets)
|
||||
## {
|
||||
## if $(debug-config)
|
||||
## {
|
||||
## ECHO notice: [cmdline-cfg] adding toolset=$(t) "to build request." ;
|
||||
## }
|
||||
## extra-build-request += toolset=$(t) ;
|
||||
## }
|
||||
## }
|
||||
## }
|
||||
|
||||
|
||||
# FIXME:
|
||||
## if USER_MODULE in [ RULENAMES ]
|
||||
## {
|
||||
## USER_MODULE site-config user-config ;
|
||||
## }
|
||||
|
||||
if get_boolean_option("version"):
|
||||
# FIXME: Move to a separate module. Include bjam
|
||||
# verision.
|
||||
print "Boost.Build M15 (Python port in development)"
|
||||
sys.exit(0)
|
||||
|
||||
b2.tools.common.init(manager)
|
||||
|
||||
# We always load project in "." so that 'use-project' directives has
|
||||
# any chance of been seen. Otherwise, we won't be able to refer to
|
||||
# subprojects using target ids.
|
||||
|
||||
current_project = None
|
||||
projects = manager.projects()
|
||||
if projects.find(".", "."):
|
||||
current_project = projects.target(projects.load("."))
|
||||
|
||||
# FIXME: revive this logic, when loading of gcc works
|
||||
if not feature.values("<toolset>") and not ignore_config and 0:
|
||||
default_toolset = "gcc" ;
|
||||
if bjam.variable("OS") == "NT":
|
||||
default_toolset = "msvc"
|
||||
|
||||
print "warning: No toolsets are configured." ;
|
||||
print "warning: Configuring default toolset '%s'" % default_toolset
|
||||
print "warning: If the default is wrong, you may not be able to build C++ programs."
|
||||
print "warning: Use the \"--toolset=xxxxx\" option to override our guess."
|
||||
print "warning: For more configuration options, please consult"
|
||||
print "warning: http://boost.org/boost-build2/doc/html/bbv2/advanced/configuration.html"
|
||||
|
||||
projects.project_rules().using([default_toolset])
|
||||
|
||||
(target_ids, properties) = b2.build.build_request.from_command_line(
|
||||
argv[1:] + extra_build_request)
|
||||
|
||||
if properties:
|
||||
expanded = b2.build.build_request.expand_no_defaults(properties)
|
||||
xexpanded = []
|
||||
for e in expanded:
|
||||
xexpanded.append(property_set.create(feature.split(e)))
|
||||
expanded = xexpanded
|
||||
else:
|
||||
expanded = [property_set.empty()]
|
||||
|
||||
targets = []
|
||||
|
||||
clean = get_boolean_option("clean")
|
||||
clean_all = get_boolean_option("clean-all")
|
||||
|
||||
|
||||
bjam_targets = []
|
||||
|
||||
# Given a target id, try to find and return corresponding target.
|
||||
# This is only invoked when there's no Jamfile in "."
|
||||
# This code somewhat duplicates code in project-target.find but we can't reuse
|
||||
# that code without project-targets instance.
|
||||
def find_target (target_id):
|
||||
split = target_id.split("//")
|
||||
pm = None
|
||||
if len(split) > 1:
|
||||
pm = projects.find(split[0], ".")
|
||||
else:
|
||||
pm = projects.find(target_id, ".")
|
||||
|
||||
result = None
|
||||
if pm:
|
||||
result = projects.target(pm)
|
||||
|
||||
if len(split) > 1:
|
||||
result = result.find(split[1])
|
||||
|
||||
if not current_project and not target_ids:
|
||||
print "error: no Jamfile in current directory found, and no target references specified."
|
||||
sys.exit(1)
|
||||
|
||||
for id in target_ids:
|
||||
if id == "clean":
|
||||
clean = 1
|
||||
else:
|
||||
t = None
|
||||
if current_project:
|
||||
t = current_project.find(id, no_error=1)
|
||||
else:
|
||||
t = find_target(id)
|
||||
|
||||
if not t:
|
||||
print "notice: could not find main target '%s'" % id
|
||||
print "notice: assuming it's a name of file to create " ;
|
||||
bjam_targets.append(id)
|
||||
else:
|
||||
targets.append(t)
|
||||
|
||||
if not targets:
|
||||
targets = [projects.target(projects.module_name("."))]
|
||||
|
||||
virtual_targets = []
|
||||
|
||||
# Virtual targets obtained when building main targets references on
|
||||
# the command line. When running
|
||||
#
|
||||
# bjam --clean main_target
|
||||
#
|
||||
# we want to clean the files that belong only to that main target,
|
||||
# so we need to record which targets are produced.
|
||||
results_of_main_targets = []
|
||||
|
||||
for p in expanded:
|
||||
manager.set_command_line_free_features(property_set.create(p.free()))
|
||||
|
||||
for t in targets:
|
||||
try:
|
||||
g = t.generate(p)
|
||||
if not isinstance(t, ProjectTarget):
|
||||
results_of_main_targets.extend(g.targets())
|
||||
virtual_targets.extend(g.targets())
|
||||
except ExceptionWithUserContext, e:
|
||||
e.report()
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
# The cleaning is tricky. Say, if
|
||||
# user says:
|
||||
#
|
||||
# bjam --clean foo
|
||||
#
|
||||
# where 'foo' is a directory, then we want to clean targets
|
||||
# which are in 'foo' or in any children Jamfiles, but not in any
|
||||
# unrelated Jamfiles. So, we collect the list of project under which
|
||||
# cleaning is allowed.
|
||||
#
|
||||
projects_to_clean = []
|
||||
targets_to_clean = []
|
||||
if clean or clean_all:
|
||||
for t in targets:
|
||||
if isinstance(t, ProjectTarget):
|
||||
projects_to_clean.append(t.project_module())
|
||||
|
||||
for t in results_of_main_targets:
|
||||
# Don't include roots or sources.
|
||||
targets_to_clean += b2.build.virtual_target.traverse(t)
|
||||
|
||||
targets_to_clean = unique(targets_to_clean)
|
||||
|
||||
is_child_cache_ = {}
|
||||
|
||||
# Returns 'true' if 'project' is a child of 'current-project',
|
||||
# possibly indirect, or is equal to 'project'.
|
||||
# Returns 'false' otherwise.
|
||||
def is_child (project):
|
||||
|
||||
r = is_child_cache_.get(project, None)
|
||||
if not r:
|
||||
if project in projects_to_clean:
|
||||
r = 1
|
||||
else:
|
||||
parent = manager.projects().attribute(project, "parent-module")
|
||||
if parent and parent != "user-config":
|
||||
r = is_child(parent)
|
||||
else:
|
||||
r = 0
|
||||
|
||||
is_child_cache_[project] = r
|
||||
|
||||
return r
|
||||
|
||||
actual_targets = []
|
||||
for t in virtual_targets:
|
||||
actual_targets.append(t.actualize())
|
||||
|
||||
|
||||
bjam.call("NOTFILE", "all")
|
||||
bjam.call("DEPENDS", "all", actual_targets)
|
||||
|
||||
if bjam_targets:
|
||||
bjam.call("UPDATE", ["<e>%s" % x for x in bjam_targets])
|
||||
elif clean_all:
|
||||
bjam.call("UPDATE", "clean-all")
|
||||
elif clean:
|
||||
to_clean = []
|
||||
for t in manager.virtual_targets().all_targets():
|
||||
p = t.project()
|
||||
|
||||
# Remove only derived targets.
|
||||
if t.action() and \
|
||||
(t in targets_to_clean or is_child(p.project_module())):
|
||||
to_clean.append(t)
|
||||
|
||||
to_clean_actual = [t.actualize() for t in to_clean]
|
||||
manager.engine().set_update_action('common.Clean', 'clean',
|
||||
to_clean_actual, None)
|
||||
|
||||
bjam.call("UPDATE", "clean")
|
||||
|
||||
else:
|
||||
bjam.call("UPDATE", "all")
|
||||
44
src/exceptions.py
Normal file
44
src/exceptions.py
Normal file
@@ -0,0 +1,44 @@
|
||||
# Copyright Pedro Ferreira 2005. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# TODO: add more exception types?
|
||||
|
||||
class BaseException (Exception):
    """Root of the Boost.Build exception hierarchy.

    NOTE(review): the name shadows the builtin BaseException inside this
    module; renaming would break existing callers, so it is kept.
    """
    def __init__ (self, message = ''):
        Exception.__init__ (self, message)
|
||||
|
||||
class UserError (BaseException):
    """Exception signalling a user error."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class FeatureConflict (BaseException):
    """Exception signalling a feature conflict."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class InvalidSource (BaseException):
    """Exception signalling an invalid source."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class InvalidFeature (BaseException):
    """Exception signalling an invalid feature."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class InvalidProperty (BaseException):
    """Exception signalling an invalid property."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class InvalidValue (BaseException):
    """Exception signalling an invalid value."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class InvalidAttribute (BaseException):
    """Exception signalling an invalid attribute."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class AlreadyDefined (BaseException):
    """Exception signalling a duplicate definition."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class IllegalOperation (BaseException):
    """Exception signalling an illegal operation."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class Recursion (BaseException):
    """Exception signalling unexpected recursion."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class NoBestMatchingAlternative (BaseException):
    """Exception signalling that no best matching alternative exists."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
|
||||
class NoAction (BaseException):
    """Exception signalling a missing action."""
    def __init__ (self, message = ''):
        BaseException.__init__ (self, message)
|
||||
@@ -111,6 +111,8 @@ IMPORT modules : import : : import ;
|
||||
BOOST_BUILD_PATH += $(whereami:D)/$(subdirs) ;
|
||||
|
||||
modules.poke .ENVIRON : BOOST_BUILD_PATH : $(BOOST_BUILD_PATH) ;
|
||||
|
||||
modules.poke : EXTRA_PYTHONPATH : $(whereami) ;
|
||||
}
|
||||
|
||||
# Reload the modules, to clean up things. The modules module can tolerate
|
||||
@@ -129,11 +131,110 @@ local dont-build = [ option.process ] ;
|
||||
#
|
||||
if ! $(dont-build)
|
||||
{
|
||||
# Allow users to override the build system file from the
|
||||
# command-line (mostly for testing)
|
||||
local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
|
||||
build-system ?= build-system ;
|
||||
if ! [ MATCH (--python) : $(ARGV) ]
|
||||
{
|
||||
# Allow users to override the build system file from the
|
||||
# command-line (mostly for testing)
|
||||
local build-system = [ MATCH --build-system=(.*) : $(ARGV) ] ;
|
||||
build-system ?= build-system ;
|
||||
|
||||
# Use last element in case of multiple command-line options
|
||||
import $(build-system[-1]) ;
|
||||
# Use last element in case of multiple command-line options
|
||||
import $(build-system[-1]) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
ECHO "Boost.Build V2 Python port (experimental)" ;
|
||||
|
||||
# Define additional interface that is exposed to Python code. Python code will
|
||||
# also have access to select bjam builtins in the 'bjam' module, but some
|
||||
# things are easier to define outside C.
|
||||
module python_interface
|
||||
{
|
||||
rule load ( module-name : location )
|
||||
{
|
||||
USER_MODULE $(module-name) ;
|
||||
# Make all rules in the loaded module available in
|
||||
# the global namespace, so that we don't have
|
||||
# to bother specifying "right" module when calling
|
||||
# from Python.
|
||||
module $(module-name)
|
||||
{
|
||||
__name__ = $(1) ;
|
||||
include $(2) ;
|
||||
local rules = [ RULENAMES $(1) ] ;
|
||||
IMPORT $(1) : $(rules) : $(1) : $(1).$(rules) ;
|
||||
}
|
||||
}
|
||||
|
||||
rule peek ( module-name ? : variables + )
|
||||
{
|
||||
module $(<)
|
||||
{
|
||||
return $($(>)) ;
|
||||
}
|
||||
}
|
||||
|
||||
rule set-variable ( module-name : name : value * )
|
||||
{
|
||||
module $(<)
|
||||
{
|
||||
$(>) = $(3) ;
|
||||
}
|
||||
}
|
||||
|
||||
rule set-top-level-targets ( targets * )
|
||||
{
|
||||
DEPENDS all : $(targets) ;
|
||||
}
|
||||
|
||||
rule set-update-action ( action : targets * : sources * : properties * )
|
||||
{
|
||||
$(action) $(targets) : $(sources) : $(properties) ;
|
||||
}
|
||||
|
||||
rule set-target-variable ( targets + : variable : value * : append ? )
|
||||
{
|
||||
if $(append)
|
||||
{
|
||||
$(variable) on $(targets) += $(value) ;
|
||||
}
|
||||
else
|
||||
{
|
||||
$(variable) on $(targets) = $(value) ;
|
||||
}
|
||||
}
|
||||
|
||||
rule get-target-variable ( target : variable )
|
||||
{
|
||||
return [ on $(target) return $($(variable)) ] ;
|
||||
}
|
||||
|
||||
rule import-rules-from-parent ( parent-module : this-module : user-rules )
|
||||
{
|
||||
IMPORT $(parent-module) : $(user-rules) : $(this-module) : $(user-rules) ;
|
||||
EXPORT $(this-module) : $(user-rules) ;
|
||||
}
|
||||
|
||||
rule mark-included ( targets * : includes * ) {
|
||||
INCLUDES $(targets) : $(INCLUDES) ;
|
||||
}
|
||||
}
|
||||
|
||||
PYTHON_IMPORT_RULE bootstrap : bootstrap : PyBB : bootstrap ;
|
||||
modules.poke PyBB : root : [ NORMALIZE_PATH $(.bootstrap-file:DT)/.. ] ;
|
||||
|
||||
module PyBB
|
||||
{
|
||||
bootstrap $(root) ;
|
||||
}
|
||||
|
||||
|
||||
#PYTHON_IMPORT_RULE boost.build.build_system : main : PyBB : main ;
|
||||
|
||||
#module PyBB
|
||||
#{
|
||||
# main ;
|
||||
#}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
132
src/manager.py
Normal file
132
src/manager.py
Normal file
@@ -0,0 +1,132 @@
|
||||
# Copyright Pedro Ferreira 2005. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
import bjam
|
||||
|
||||
# To simplify implementation of tools level, we'll
|
||||
# have a global variable keeping the current manager.
|
||||
the_manager = None
|
||||
def get_manager():
    """Return the globally active Manager instance (installed by
    Manager.__init__); None before any Manager has been created."""
    return the_manager
|
||||
|
||||
class Manager:
    """ This class is a facade to the Boost.Build system.
        It serves as the root to access all data structures in use.
    """

    def __init__ (self, engine, global_build_dir):
        """ Constructor.
            engine: the build engine that will actually construct the targets.
            global_build_dir: forwarded to the project registry (build
                              output location).
        """
        # Imports are local, presumably to avoid import cycles at module
        # load time.
        from build.virtual_target import VirtualTargetRegistry
        from build.targets import TargetRegistry
        from build.project import ProjectRegistry
        from build.scanner import ScannerRegistry
        from build.errors import Errors
        from b2.util.logger import NullLogger
        from build import build_request, property_set, feature

        self.engine_ = engine
        self.virtual_targets_ = VirtualTargetRegistry (self)
        self.projects_ = ProjectRegistry (self, global_build_dir)
        self.targets_ = TargetRegistry ()
        self.logger_ = NullLogger ()
        self.scanners_ = ScannerRegistry (self)
        self.argv_ = bjam.variable("ARGV")
        self.boost_build_path_ = bjam.variable("BOOST_BUILD_PATH")
        self.errors_ = Errors()
        self.command_line_free_features_ = property_set.empty()

        # Object Map.
        # TODO: This is a kludge: maps object names to the actual instances.
        # Sometimes, objects are stored in properties, along with some grist.
        # This map is used to store the value and return an id, which can be later on used to retriev it back.
        self.object_map_ = {}

        # Publish this instance as the module-global manager returned by
        # get_manager().
        global the_manager
        the_manager = self

    def scanners (self):
        # Accessor: the ScannerRegistry.
        return self.scanners_

    def engine (self):
        # Accessor: the build engine passed to the constructor.
        return self.engine_

    def virtual_targets (self):
        # Accessor: the VirtualTargetRegistry.
        return self.virtual_targets_

    def targets (self):
        # Accessor: the TargetRegistry.
        return self.targets_

    def projects (self):
        # Accessor: the ProjectRegistry.
        return self.projects_

    def argv (self):
        # Accessor: bjam's command line (ARGV) captured at construction.
        return self.argv_

    def logger (self):
        # Accessor: the current logger (NullLogger by default).
        return self.logger_

    def set_logger (self, logger):
        # Replace the logger used by the system.
        self.logger_ = logger

    def errors (self):
        # Accessor: the Errors helper.
        return self.errors_

    def getenv(self, name):
        # Read a bjam variable (bjam's view of the environment).
        return bjam.variable(name)

    def boost_build_path(self):
        # Accessor: BOOST_BUILD_PATH captured at construction.
        return self.boost_build_path_

    def command_line_free_features(self):
        # Accessor: free features given on the command line (a property_set).
        return self.command_line_free_features_

    def set_command_line_free_features(self, v):
        # Record the command-line free features (a property_set).
        self.command_line_free_features_ = v

    def register_object (self, value):
        """ Stores an object in a map and returns a key that can be used to retrieve it.
        """
        # NOTE(review): the key is derived from str(value); distinct
        # objects with equal string forms would collide -- confirm.
        key = 'object_registry_' + str (value)
        self.object_map_ [key] = value
        return key

    def get_object (self, key):
        """ Returns a previously registered object.
        """
        if not isinstance (key, str):
            # Probably it's the object itself.
            return key

        return self.object_map_ [key]

    def construct (self, properties = [], targets = []):
        """ Constructs the dependency graph.
            properties: the build properties.
            targets: the targets to consider. If none is specified, uses all.
        """
        # NOTE(review): 'build_request', 'property_set' and 'feature' are
        # imported only inside __init__'s local scope, so the references
        # below raise NameError at runtime -- this method looks
        # unfinished. Also note the mutable default argument 'targets'
        # is appended to, leaking state across calls.
        if not targets:
            for name, project in self.projects ().projects ():
                targets.append (project.target ())

        property_groups = build_request.expand_no_defaults (properties)

        virtual_targets = []
        build_prop_sets = []
        for p in property_groups:
            build_prop_sets.append (property_set.create (feature.split (p)))

        if not build_prop_sets:
            build_prop_sets = [property_set.empty ()]

        for build_properties in build_prop_sets:
            for target in targets:
                result = target.generate (build_properties)
                virtual_targets.extend (result.targets ())

        actual_targets = []
        for virtual_target in virtual_targets:
            actual_targets.extend (virtual_target.actualize ())
|
||||
|
||||
0
src/tools/__init__.py
Normal file
0
src/tools/__init__.py
Normal file
722
src/tools/builtin.py
Normal file
722
src/tools/builtin.py
Normal file
@@ -0,0 +1,722 @@
|
||||
# Status: minor updates by Steven Watanabe to make gcc work
|
||||
#
|
||||
# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
""" Defines standard features and rules.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from b2.build import feature, property, virtual_target, generators, type, property_set, scanner
|
||||
from b2.util.utility import *
|
||||
from b2.util import path, regex
|
||||
import b2.tools.types
|
||||
from b2.manager import get_manager
|
||||
|
||||
# Records explicit properties for a variant.
|
||||
# The key is the variant name.
|
||||
__variant_explicit_properties = {}
|
||||
|
||||
def reset ():
    """ Clear the module state. This is mainly for testing purposes.
    """
    global __variant_explicit_properties

    # Forget every variant definition recorded by variant().
    __variant_explicit_properties = {}
|
||||
|
||||
def variant (name, parents_or_properties, explicit_properties = None):
    """ Declares a new variant.
        First determines explicit properties for this variant, by
        refining parents' explicit properties with the passed explicit
        properties. The result is remembered and will be used if
        this variant is used as parent.

        Second, determines the full property set for this variant by
        adding to the explicit properties default values for all properties
        which neither present nor are symmetric.

        Lastly, makes appropriate value of 'variant' property expand
        to the full property set.
        name: Name of the variant
        parents_or_properties: Specifies parent variants, if
                               'explicit_properties' are given,
                               and explicit_properties otherwise.
        explicit_properties: Explicit properties. Defaults to None
                             (equivalent to the former mutable [] default).
    """
    parents = []
    if not explicit_properties:
        # A gristed first element means the second argument holds
        # properties, not parent variant names.
        if get_grist (parents_or_properties [0]):
            explicit_properties = parents_or_properties
        else:
            parents = parents_or_properties
    else:
        parents = parents_or_properties

    # The problem is that we have to check for conflicts
    # between base variants.
    if len (parents) > 1:
        raise BaseException ("Multiple base variants are not yet supported")

    inherited = []
    # Add explicitly specified properties for parents
    for p in parents:
        # TODO: the check may be stricter
        if not feature.is_implicit_value (p):
            # (Typo "varaint" fixed in the message below.)
            raise BaseException ("Invalid base variant '%s'" % p)

        inherited += __variant_explicit_properties [p]

    property.validate (explicit_properties)
    explicit_properties = property.refine (inherited, explicit_properties)

    # Record explicitly specified properties for this variant
    # We do this after inheriting parents' properties, so that
    # they affect other variants, derived from this one.
    __variant_explicit_properties [name] = explicit_properties

    feature.extend('variant', [name])
    feature.compose (replace_grist (name, '<variant>'), explicit_properties)
|
||||
|
||||
__os_names = """
|
||||
amiga aix bsd cygwin darwin dos emx freebsd hpux iphone linux netbsd
|
||||
openbsd osf qnx qnxnto sgi solaris sun sunos svr4 sysv ultrix unix unixware
|
||||
vms windows
|
||||
""".split()
|
||||
|
||||
# Translates from bjam current OS to the os tags used in host-os and target-os,
# i.e. returns the running host-os.
#
def default_host_os():
    """Return the abstract (lower-case) OS tag for the machine bjam runs on."""
    host_os = os_name()
    if host_os not in (tag.upper() for tag in __os_names):
        # Map bjam-specific OS identifiers onto the closest abstract tag;
        # anything unrecognized is treated as a generic 'unix'.
        translation = {
            'NT':           'windows',
            'AS400':        'unix',
            'MINGW':        'windows',
            'BSDI':         'bsd',
            'COHERENT':     'unix',
            'DRAGONFLYBSD': 'bsd',
            'IRIX':         'sgi',
            'MACOSX':       'darwin',
            'KFREEBSD':     'freebsd',
            'LINUX':        'linux',
        }
        host_os = translation.get(host_os, 'unix')
    return host_os.lower()
|
||||
|
||||
def register_globals ():
    """ Registers all features and variants declared by this module.

        Called once at import time (see the module-level call below); the
        registration order matters because later declarations (the 'variant'
        calls) reference features declared earlier in this function.
    """

    # This feature is used to determine which OS we're on.
    # In future, this may become <target-os> and <host-os>
    # TODO: check this. Compatibility with bjam names? Subfeature for version?
    # NOTE(review): this local 'os' shadows any module-level 'os' import for
    # the rest of this function body.
    os = sys.platform
    feature.feature ('os', [os], ['propagated', 'link-incompatible'])


    # The two OS features define a known set of abstract OS names. The host-os is
    # the OS under which bjam is running. Even though this should really be a fixed
    # property we need to list all the values to prevent unknown value errors. Both
    # set the default value to the current OS to account for the default use case of
    # building on the target OS.
    feature.feature('host-os', __os_names)
    feature.set_default('host-os', default_host_os())

    feature.feature('target-os', __os_names, ['propagated', 'link-incompatible'])
    feature.set_default('target-os', default_host_os())

    # Toolset is implicit: a bare 'gcc' in a property list means <toolset>gcc.
    feature.feature ('toolset', [], ['implicit', 'propagated' ,'symmetric'])

    feature.feature ('stdlib', ['native'], ['propagated', 'composite'])

    feature.feature ('link', ['shared', 'static'], ['propagated'])
    feature.feature ('runtime-link', ['shared', 'static'], ['propagated'])
    feature.feature ('runtime-debugging', ['on', 'off'], ['propagated'])


    feature.feature ('optimization', ['off', 'speed', 'space'], ['propagated'])
    feature.feature ('profiling', ['off', 'on'], ['propagated'])
    feature.feature ('inlining', ['off', 'on', 'full'], ['propagated'])

    feature.feature ('threading', ['single', 'multi'], ['propagated'])
    feature.feature ('rtti', ['on', 'off'], ['propagated'])
    feature.feature ('exception-handling', ['on', 'off'], ['propagated'])
    feature.feature ('debug-symbols', ['on', 'off'], ['propagated'])
    feature.feature ('define', [], ['free'])
    feature.feature ('include', [], ['free', 'path']) #order-sensitive
    feature.feature ('cflags', [], ['free'])
    feature.feature ('cxxflags', [], ['free'])
    feature.feature ('linkflags', [], ['free'])
    feature.feature ('archiveflags', [], ['free'])
    feature.feature ('version', [], ['free'])

    feature.feature ('location-prefix', [], ['free'])

    feature.feature ('action', [], ['free'])


    # The following features are incidental, since
    # in themself they have no effect on build products.
    # Not making them incidental will result in problems in corner
    # cases, for example:
    #
    #    unit-test a : a.cpp : <use>b ;
    #    lib b : a.cpp b ;
    #
    # Here, if <use> is not incidental, we'll decide we have two
    # targets for a.obj with different properties, and will complain.
    #
    # Note that making feature incidental does not mean it's ignored. It may
    # be ignored when creating the virtual target, but the rest of build process
    # will use them.
    feature.feature ('use', [], ['free', 'dependency', 'incidental'])
    feature.feature ('dependency', [], ['free', 'dependency', 'incidental'])
    feature.feature ('implicit-dependency', [], ['free', 'dependency', 'incidental'])

    feature.feature('warnings', [
        'on',         # Enable default/"reasonable" warning level for the tool.
        'all',        # Enable all possible warnings issued by the tool.
        'off'],       # Disable all warnings issued by the tool.
        ['incidental', 'propagated'])

    feature.feature('warnings-as-errors', [
        'off',        # Do not fail the compilation if there are warnings.
        'on'],        # Fail the compilation if there are warnings.
        ['incidental', 'propagated'])

    feature.feature ('source', [], ['free', 'dependency', 'incidental'])
    feature.feature ('library', [], ['free', 'dependency', 'incidental'])
    feature.feature ('file', [], ['free', 'dependency', 'incidental'])
    feature.feature ('find-shared-library', [], ['free']) #order-sensitive ;
    feature.feature ('find-static-library', [], ['free']) #order-sensitive ;
    feature.feature ('library-path', [], ['free', 'path']) #order-sensitive ;
    # Internal feature.
    feature.feature ('library-file', [], ['free', 'dependency'])

    feature.feature ('name', [], ['free'])
    feature.feature ('tag', [], ['free'])
    feature.feature ('search', [], ['free', 'path']) #order-sensitive ;
    feature.feature ('location', [], ['free', 'path'])

    feature.feature ('dll-path', [], ['free', 'path'])
    feature.feature ('hardcode-dll-paths', ['true', 'false'], ['incidental'])


    # This is internal feature which holds the paths of all dependency
    # dynamic libraries. On Windows, it's needed so that we can all
    # those paths to PATH, when running applications.
    # On Linux, it's needed to add proper -rpath-link command line options.
    feature.feature ('xdll-path', [], ['free', 'path'])

    #provides means to specify def-file for windows dlls.
    feature.feature ('def-file', [], ['free', 'dependency'])

    # This feature is used to allow specific generators to run.
    # For example, QT tools can only be invoked when QT library
    # is used. In that case, <allow>qt will be in usage requirement
    # of the library.
    feature.feature ('allow', [], ['free'])

    # The addressing model to generate code for. Currently a limited set only
    # specifying the bit size of pointers.
    feature.feature('address-model', ['16', '32', '64'], ['propagated', 'optional'])

    # Type of CPU architecture to compile for.
    feature.feature('architecture', [
        # x86 and x86-64
        'x86',

        # ia64
        'ia64',

        # Sparc
        'sparc',

        # RS/6000 & PowerPC
        'power',

        # MIPS/SGI
        'mips1', 'mips2', 'mips3', 'mips4', 'mips32', 'mips32r2', 'mips64',

        # HP/PA-RISC
        'parisc',

        # Advanced RISC Machines
        'arm',

        # Combined architectures for platforms/toolsets that support building for
        # multiple architectures at once. "combined" would be the default multi-arch
        # for the toolset.
        'combined',
        'combined-x86-power'],

        ['propagated', 'optional'])

    # The specific instruction set in an architecture to compile.
    feature.feature('instruction-set', [
        # x86 and x86-64
        # NOTE(review): 'mermon' below looks like a typo for 'merom' -- confirm
        # against the bjam feature list before changing, since values are
        # user-visible.
        'i386', 'i486', 'i586', 'i686', 'pentium', 'pentium-mmx', 'pentiumpro', 'pentium2', 'pentium3',
        'pentium3m', 'pentium-m', 'pentium4', 'pentium4m', 'prescott', 'nocona', 'conroe', 'conroe-xe',
        'conroe-l', 'allendale', 'mermon', 'mermon-xe', 'kentsfield', 'kentsfield-xe', 'penryn', 'wolfdale',
        'yorksfield', 'nehalem', 'k6', 'k6-2', 'k6-3', 'athlon', 'athlon-tbird', 'athlon-4', 'athlon-xp',
        'athlon-mp', 'k8', 'opteron', 'athlon64', 'athlon-fx', 'winchip-c6', 'winchip2', 'c3', 'c3-2',

        # ia64
        'itanium', 'itanium1', 'merced', 'itanium2', 'mckinley',

        # Sparc
        'v7', 'cypress', 'v8', 'supersparc', 'sparclite', 'hypersparc', 'sparclite86x', 'f930', 'f934',
        'sparclet', 'tsc701', 'v9', 'ultrasparc', 'ultrasparc3',

        # RS/6000 & PowerPC
        '401', '403', '405', '405fp', '440', '440fp', '505', '601', '602',
        '603', '603e', '604', '604e', '620', '630', '740', '7400',
        '7450', '750', '801', '821', '823', '860', '970', '8540',
        'power-common', 'ec603e', 'g3', 'g4', 'g5', 'power', 'power2',
        'power3', 'power4', 'power5', 'powerpc', 'powerpc64', 'rios',
        'rios1', 'rsc', 'rios2', 'rs64a',

        # MIPS
        '4kc', '4kp', '5kc', '20kc', 'm4k', 'r2000', 'r3000', 'r3900', 'r4000',
        'r4100', 'r4300', 'r4400', 'r4600', 'r4650',
        'r6000', 'r8000', 'rm7000', 'rm9000', 'orion', 'sb1', 'vr4100',
        'vr4111', 'vr4120', 'vr4130', 'vr4300',
        'vr5000', 'vr5400', 'vr5500',

        # HP/PA-RISC
        '700', '7100', '7100lc', '7200', '7300', '8000',

        # Advanced RISC Machines
        'armv2', 'armv2a', 'armv3', 'armv3m', 'armv4', 'armv4t', 'armv5',
        'armv5t', 'armv5te', 'armv6', 'armv6j', 'iwmmxt', 'ep9312'],

        ['propagated', 'optional'])

    # Windows-specific features
    feature.feature ('user-interface', ['console', 'gui', 'wince', 'native', 'auto'], [])

    feature.feature ('variant', [], ['implicit', 'composite', 'propagated', 'symmetric'])


    # Declare the standard build variants. 'profile' inherits from 'release'.
    variant ('debug', ['<optimization>off', '<debug-symbols>on', '<inlining>off', '<runtime-debugging>on'])
    variant ('release', ['<optimization>speed', '<debug-symbols>off', '<inlining>full',
                         '<runtime-debugging>off', '<define>NDEBUG'])
    variant ('profile', ['release'], ['<profiling>on', '<debug-symbols>on'])

    # Register the basic header and C source types.
    type.register ('H', ['h'])
    type.register ('HPP', ['hpp'], 'H')
    type.register ('C', ['c'])
|
||||
|
||||
|
||||
# Initialize the module state and declare all built-in features, variants and
# types as soon as this module is imported.
reset ()
register_globals ()
|
||||
|
||||
class SearchedLibTarget (virtual_target.AbstractFileTarget):
    """ A virtual target representing a library that the linker searches for
        (via its name and search paths) rather than one built from sources or
        referenced by an explicit file path.
    """

    def __init__ (self, name, project, shared, real_name, search, action):
        """ name:      name of the target within the project
            project:   the project this target belongs to
            shared:    True if the shared flavour of the library is wanted
            real_name: name to hand to the linker; falls back to 'name'
            search:    list of paths to search the library in
            action:    the (null) action recorded as producing this target
        """
        virtual_target.AbstractFileTarget.__init__ (self, name, False, 'SEARCHED_LIB', project, action)

        self.shared_ = shared
        self.real_name_ = real_name
        if not self.real_name_:
            self.real_name_ = name
        self.search_ = search

    def shared (self):
        """ Whether the shared flavour of the library is searched for. """
        return self.shared_

    def real_name (self):
        """ The library name to pass to the linker. """
        return self.real_name_

    def search (self):
        """ The list of paths to search the library in. """
        return self.search_

    def actualize_location (self, target):
        # A searched library has no file of its own, so mark the engine
        # target as a non-file target.
        # Bug fix: the original referenced the module-level name 'project';
        # the owning project is stored on the instance by the base class.
        self.project_.manager ().engine ().add_not_file_target (target)

    def path (self):
        #FIXME: several functions rely on this not being None
        return ""
|
||||
|
||||
|
||||
class CScanner (scanner.Scanner):
    """ Scanner that extracts '#include' dependencies from C/C++ source files.

        Included headers are searched for on the paths this scanner was
        instantiated with (the values of the 'include' feature).
    """

    def __init__ (self, includes):
        scanner.Scanner.__init__ (self)

        # Include paths used as SEARCH values for the discovered headers.
        self.includes_ = includes

    def pattern (self):
        # Matches both the <...> and the "..." forms of #include; the inner
        # groups capture the included file name.
        return r'#[ \t]*include[ ]*(<(.*)>|"(.*)")'

    def process (self, target, matches, binding):
        """ Converts regex matches into gristed engine targets, marks them as
            included by 'target', sets their SEARCH paths and propagates this
            scanner to them.
        """

        # Split the matches into angle-bracket and quoted includes; they get
        # different grists (see comment below).
        angle = regex.transform (matches, "<(.*)>")
        quoted = regex.transform (matches, '"(.*)"')

        # Grist unique to this scanner instance.
        g = str(id(self))
        # Directory of the including file, taken from its binding.
        b = os.path.normpath(os.path.dirname(binding[0]))

        # Attach binding of including file to included targets.
        # When target is directly created from virtual target
        # this extra information is unnecessary. But in other
        # cases, it allows to distinguish between two headers of the
        # same name included from different places.
        # We don't need this extra information for angle includes,
        # since they should not depend on including file (we can't
        # get literal "." in include path).
        g2 = g + "#" + b

        g = "<" + g + ">"
        g2 = "<" + g2 + ">"
        angle = [g + x for x in angle]
        quoted = [g2 + x for x in quoted]

        all = angle + quoted
        bjam.call("mark-included", target, all)

        engine = get_manager().engine()
        engine.set_target_variable(angle, "SEARCH", self.includes_)
        engine.set_target_variable(quoted, "SEARCH", self.includes_)

        # Just propagate current scanner to includes, in a hope
        # that includes do not change scanners.
        get_manager().scanners().propagate(self, angle + quoted)
|
||||
|
||||
# Register the include scanner, activated by the 'include' feature, and make
# it the scanner used for C++ sources.
scanner.register (CScanner, 'include')
type.set_scanner ('CPP', CScanner)
|
||||
|
||||
# Ported to trunk@47077
class LibGenerator (generators.Generator):
    """ The generator class for libraries (target type LIB). Depending on properties it will
        request building of the appropriate specific type -- SEARCHED_LIB, STATIC_LIB or
        SHARED_LIB.
    """

    def __init__(self, id = 'LibGenerator', composing = True, source_types = [], target_types_and_names = ['LIB'], requirements = []):
        generators.Generator.__init__(self, id, composing, source_types, target_types_and_names, requirements)

    def run(self, project, name, prop_set, sources):
        """ Picks the concrete library type implied by the properties and
            delegates construction to the generator for that type. Returns
            None (implicitly) when no explicit name was given.
        """
        # The lib generator is composing, and can be only invoked with
        # explicit name. This check is present in generator.run (and so in
        # builtin.LinkingGenerator), but duplicate it here to avoid doing
        # extra work.
        if name:
            properties = prop_set.raw()
            # Determine the needed target type
            actual_type = None
            properties_grist = get_grist(properties)
            # <search>/<name> without <source> means a pre-installed library
            # to be located by the linker.
            if '<source>' not in properties_grist and \
               ('<search>' in properties_grist or '<name>' in properties_grist):
                actual_type = 'SEARCHED_LIB'
            elif '<file>' in properties_grist:
                # The generator for
                actual_type = 'LIB'
            elif '<link>shared' in properties:
                actual_type = 'SHARED_LIB'
            else:
                actual_type = 'STATIC_LIB'

            prop_set = prop_set.add_raw(['<main-target-type>LIB'])

            # Construct the target.
            return generators.construct(project, name, actual_type, prop_set, sources)

    def viable_source_types(self):
        # Composing generator: any source type may be consumed.
        return ['*']
|
||||
|
||||
# A single shared instance handles all LIB main targets.
generators.register(LibGenerator())
|
||||
|
||||
### # The implementation of the 'lib' rule. Beyond standard syntax that rule allows
|
||||
### # simplified:
|
||||
### # lib a b c ;
|
||||
### # so we need to write code to handle that syntax.
|
||||
### rule lib ( names + : sources * : requirements * : default-build *
|
||||
### : usage-requirements * )
|
||||
### {
|
||||
### local project = [ project.current ] ;
|
||||
###
|
||||
### # This is a circular module dependency, so it must be imported here
|
||||
### import targets ;
|
||||
###
|
||||
### local result ;
|
||||
### if ! $(sources) && ! $(requirements)
|
||||
### && ! $(default-build) && ! $(usage-requirements)
|
||||
### {
|
||||
### for local name in $(names)
|
||||
### {
|
||||
### result += [
|
||||
### targets.main-target-alternative
|
||||
### [ new typed-target $(name) : $(project) : LIB
|
||||
### :
|
||||
### : [ targets.main-target-requirements $(requirements) <name>$(name) :
|
||||
### $(project) ]
|
||||
### : [ targets.main-target-default-build $(default-build) : $(project) ]
|
||||
### : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
|
||||
### ] ] ;
|
||||
### }
|
||||
### }
|
||||
### else
|
||||
### {
|
||||
### if $(names[2])
|
||||
### {
|
||||
### errors.user-error "When several names are given to the 'lib' rule" :
|
||||
### "it's not allowed to specify sources or requirements. " ;
|
||||
### }
|
||||
###
|
||||
### local name = $(names[1]) ;
|
||||
### result = [ targets.main-target-alternative
|
||||
### [ new typed-target $(name) : $(project) : LIB
|
||||
### : [ targets.main-target-sources $(sources) : $(name) ]
|
||||
### : [ targets.main-target-requirements $(requirements) : $(project) ]
|
||||
### : [ targets.main-target-default-build $(default-build) : $(project) ]
|
||||
### : [ targets.main-target-usage-requirements $(usage-requirements) : $(project) ]
|
||||
### ] ] ;
|
||||
### }
|
||||
### return $(result) ;
|
||||
### }
|
||||
### IMPORT $(__name__) : lib : : lib ;
|
||||
|
||||
# Updated to trunk@47077
class SearchedLibGenerator (generators.Generator):
    """ Generator producing SEARCHED_LIB targets: it records the library name
        and search paths for the linker instead of building anything.
    """

    def __init__ (self, id = 'SearchedLibGenerator', composing = False, source_types = [], target_types_and_names = ['SEARCHED_LIB'], requirements = []):
        # TODO: the comment below looks strange. There are no requirements!
        # The requirements cause the generators to be tried *only* when we're building
        # lib target and there's 'search' feature. This seems ugly --- all we want
        # is make sure SearchedLibGenerator is not invoked deep in transformation
        # search.
        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)

    def run(self, project, name, prop_set, sources):
        """ Returns (usage_requirements, [registered target] + sources), or
            None when invoked without a name.
        """
        # If name is empty, it means we're called not from top-level.
        # In this case, we just fail immediately, because SearchedLibGenerator
        # cannot be used to produce intermediate targets.
        if not name:
            return None

        properties = prop_set.raw ()
        shared = '<link>shared' in properties

        # A searched library is not produced by any real action.
        a = virtual_target.NullAction (project.manager(), prop_set)

        # Use the <name> property value as the linker-visible name when
        # given, otherwise fall back to the target name.
        real_name = feature.get_values ('<name>', properties)
        if real_name:
            real_name = real_name[0]
        else:
            # Bug fix: the original assigned to a misspelled variable
            # ('real_nake'), leaving real_name as an empty list here.
            real_name = name
        search = feature.get_values('<search>', properties)
        usage_requirements = property_set.create(['<xdll-path>' + p for p in search])
        t = SearchedLibTarget(name, project, shared, real_name, search, a)

        # We return sources for a simple reason. If there's
        #    lib png : z : <name>png ;
        # the 'z' target should be returned, so that apps linking to
        # 'png' will link to 'z', too.
        return(usage_requirements, [b2.manager.get_manager().virtual_targets().register(t)] + sources)
|
||||
|
||||
# A single shared instance handles all SEARCHED_LIB targets.
generators.register (SearchedLibGenerator ())
|
||||
|
||||
### class prebuilt-lib-generator : generator
|
||||
### {
|
||||
### rule __init__ ( * : * )
|
||||
### {
|
||||
### generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
|
||||
### }
|
||||
###
|
||||
### rule run ( project name ? : prop_set : sources * : multiple ? )
|
||||
### {
|
||||
### local f = [ $(prop_set).get <file> ] ;
|
||||
### return $(f) $(sources) ;
|
||||
### }
|
||||
### }
|
||||
###
|
||||
### generators.register
|
||||
### [ new prebuilt-lib-generator builtin.prebuilt : : LIB : <file> ] ;
|
||||
|
||||
|
||||
class CompileAction (virtual_target.Action):
    """ Action for compile steps: extends the generic Action so that the
        directories of generated sibling headers are added to the include
        path before the compiler runs.
    """

    def __init__ (self, manager, sources, action_name, prop_set):
        virtual_target.Action.__init__ (self, manager, sources, action_name, prop_set)

    def adjust_properties (self, prop_set):
        """ For all virtual targets for the same dependency graph as self,
            i.e. which belong to the same main target, add their directories
            to include path.
        """
        # The subvariant this action's first target is being created for.
        s = self.targets () [0].creating_subvariant ()

        return prop_set.add_raw (s.implicit_includes ('include', 'H'))
|
||||
|
||||
class CCompilingGenerator (generators.Generator):
    """ Declare a special compiler generator.
        The only thing it does is changing the type used to represent
        'action' in the constructed dependency graph to 'CompileAction'.
        That class in turn adds additional include paths to handle a case
        when a source file includes headers which are generated themselfs.
    """

    def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
        # TODO: (PF) What to do with optional_properties? It seemed that, in the bjam version, the arguments are wrong.
        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)

    def action_class (self):
        # Use the include-path-adjusting action instead of the generic one.
        return CompileAction
|
||||
|
||||
def register_c_compiler (id, source_types, target_types, requirements, optional_properties = []):
    """ Creates a non-composing CCompilingGenerator with the given id, types
        and combined requirements, registers it and returns the result of the
        registration.
    """
    combined_requirements = requirements + optional_properties
    return generators.register (
        CCompilingGenerator (id, False, source_types, target_types, combined_requirements))
|
||||
|
||||
|
||||
class LinkingGenerator (generators.Generator):
    """ The generator class for handling EXE and SHARED_LIB creation.

        Adds library sources from the <library> property, computes
        <library-path>/<dll-path> properties for searched and shared
        libraries, and produces <xdll-path>/<dll-path> usage requirements.
    """
    def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)

    def run (self, project, name, prop_set, sources):
        """ Runs the base generator with adjusted sources/properties and
            returns (usage_requirements, targets), or None on failure.
        """
        # Libraries passed via the <library> property become additional
        # sources (note: 'sources' is extended in place).
        lib_sources = prop_set.get('<library>')
        [ sources.append (project.manager().get_object(x)) for x in lib_sources ]

        # Add <library-path> properties for all searched libraries
        extra = []
        for s in sources:
            if s.type () == 'SEARCHED_LIB':
                search = s.search()
                extra.append(replace_grist(search, '<library-path>'))

        # <dll-path> properties to be echoed back as usage requirements.
        orig_xdll_path = []

        if prop_set.get('<hardcode-dll-paths>') == ['true'] and type.is_derived(self.target_types_ [0], 'EXE'):
            xdll_path = prop_set.get('<xdll-path>')
            orig_xdll_path = [ replace_grist(x, '<dll-path>') for x in xdll_path ]
            # It's possible that we have libraries in sources which did not came
            # from 'lib' target. For example, libraries which are specified
            # just as filenames as sources. We don't have xdll-path properties
            # for such target, but still need to add proper dll-path properties.
            for s in sources:
                if type.is_derived (s.type (), 'SHARED_LIB') and not s.action ():
                    # Unfortunately, we don't have a good way to find the path
                    # to a file, so use this nasty approach.
                    p = s.project()
                    location = path.root(s.name(), p.get('source-location'))
                    xdll_path.append(path.parent(location))

            extra += [ replace_grist(x, '<dll-path>') for x in xdll_path ]

        if extra:
            prop_set = prop_set.add_raw (extra)

        result = generators.Generator.run(self, project, name, prop_set, sources)

        if result:
            ur = self.extra_usage_requirements(result, prop_set)
            ur = ur.add(property_set.create(orig_xdll_path))
        else:
            return None

        return(ur, result)

    def extra_usage_requirements (self, created_targets, prop_set):
        """ Builds the <xdll-path> usage requirements: absolute paths of the
            created shared libraries plus any <xdll-path> values inherited
            from sources.
        """
        result = property_set.empty ()
        extra = []

        # Add appropriate <xdll-path> usage requirements.
        raw = prop_set.raw ()
        if '<link>shared' in raw:
            paths = []

            # TODO: is it safe to use the current directory? I think we should use
            # another mechanism to allow this to be run from anywhere.
            pwd = os.getcwd()

            for t in created_targets:
                if type.is_derived(t.type(), 'SHARED_LIB'):
                    paths.append(path.root(path.make(t.path()), pwd))

            extra += replace_grist(paths, '<xdll-path>')

        # We need to pass <xdll-path> features that we've got from sources,
        # because if shared library is built, exe which uses it must know paths
        # to other shared libraries this one depends on, to be able to find them
        # all at runtime.

        # Just pass all features in property_set, it's theorically possible
        # that we'll propagate <xdll-path> features explicitly specified by
        # the user, but then the user's to blaim for using internal feature.
        values = prop_set.get('<xdll-path>')
        extra += replace_grist(values, '<xdll-path>')

        if extra:
            result = property_set.create(extra)

        return result

    def generated_targets (self, sources, prop_set, project, name):
        """ Replaces searched-library sources by <find-shared-library> /
            <find-static-library> properties and delegates to the base class.
        """
        # sources to pass to inherited rule
        sources2 = []
        # properties to pass to inherited rule
        properties2 = []
        # sources which are libraries
        libraries = []

        # Searched libraries are not passed as argument to linker
        # but via some option. So, we pass them to the action
        # via property.
        properties2 = prop_set.raw()
        fsa = []   # shared searched-library names
        fst = []   # static searched-library names
        for s in sources:
            if type.is_derived(s.type(), 'SEARCHED_LIB'):
                name = s.real_name()
                if s.shared():
                    fsa.append(name)
                else:
                    fst.append(name)
            else:
                sources2.append(s)

        if fsa:
            properties2 += [replace_grist('&&'.join(fsa), '<find-shared-library>')]
        if fst:
            properties2 += [replace_grist('&&'.join(fst), '<find-static-library>')]

        spawn = generators.Generator.generated_targets(self, sources2, property_set.create(properties2), project, name)

        return spawn
|
||||
|
||||
|
||||
def register_linker(id, source_types, target_types, requirements):
    """ Creates a composing LinkingGenerator with the given id, types and
        requirements, and registers it.
    """
    generators.register(
        LinkingGenerator(id, True, source_types, target_types, requirements))
|
||||
|
||||
class ArchiveGenerator (generators.Generator):
    """ The generator class for handling STATIC_LIB creation.

        Libraries passed via the <library> property are treated as ordinary
        sources of the archive.
    """

    def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)

    def run (self, project, name, prop_set, sources):
        # Extend 'sources' in place with the <library> property values, then
        # let the base class do the actual work.
        sources += prop_set.get ('<library>')
        return generators.Generator.run (self, project, name, prop_set, sources)
|
||||
|
||||
### rule register-archiver ( id composing ? : source_types + : target_types + :
|
||||
### requirements * )
|
||||
### {
|
||||
### local g = [ new ArchiveGenerator $(id) $(composing) : $(source_types)
|
||||
### : $(target_types) : $(requirements) ] ;
|
||||
### generators.register $(g) ;
|
||||
### }
|
||||
###
|
||||
###
|
||||
### IMPORT $(__name__) : register-linker register-archiver
|
||||
### : : generators.register-linker generators.register-archiver ;
|
||||
###
|
||||
###
|
||||
###
|
||||
817
src/tools/common.py
Normal file
817
src/tools/common.py
Normal file
@@ -0,0 +1,817 @@
|
||||
# Status: being ported by Steven Watanabe
|
||||
# Base revision: 47174
|
||||
#
|
||||
# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
""" Provides actions common to all toolsets, such as creating directories and
|
||||
removing files.
|
||||
"""
|
||||
|
||||
import re
|
||||
import bjam
|
||||
import os
|
||||
import os.path
|
||||
|
||||
from b2.build import feature
|
||||
from b2.util.utility import *
|
||||
from b2.util import path
|
||||
|
||||
# Matches up to and including the first dash; group 1 captures the part of
# the string before that dash.
__re__before_first_dash = re.compile ('([^-]*)-')
|
||||
|
||||
def reset ():
    """ Clear the module state. This is mainly for testing purposes.
        Note that this must be called _after_ resetting the module 'feature'.
    """
    global __had_unspecified_value, __had_value, __declared_subfeature
    global __init_loc
    global __all_signatures, __debug_configuration, __show_configuration

    # Stores toolsets without specified initialization values.
    __had_unspecified_value = {}

    # Stores toolsets with specified initialization values.
    __had_value = {}

    # Stores toolsets with declared subfeatures.
    __declared_subfeature = {}

    # Stores all signatures of the toolsets.
    __all_signatures = {}

    # Stores the initialization locations of each toolset
    __init_loc = {}

    # Diagnostics flags taken from the bjam command line.
    __debug_configuration = '--debug-configuration' in bjam.variable('ARGV')
    __show_configuration = '--show-configuration' in bjam.variable('ARGV')

# Initialize the module state on import.
reset()
|
||||
|
||||
# ported from trunk@47174
class Configurations(object):
    """
    This class helps to manage toolset configurations. Each configuration
    has a unique ID and one or more parameters. A typical example of a unique ID
    is a condition generated by 'common.check-init-parameters' rule. Other kinds
    of IDs can be used. Parameters may include any details about the configuration
    like 'command', 'path', etc.

    A toolset configuration may be in one of the following states:

        - registered
            Configuration has been registered (e.g. by autodetection code) but has
            not yet been marked as used, i.e. 'toolset.using' rule has not yet been
            called for it.
        - used
            Once called 'toolset.using' rule marks the configuration as 'used'.

    The main difference between the states above is that while a configuration is
    'registered' its options can be freely changed. This is useful in particular
    for autodetection code - all detected configurations may be safely overwritten
    by user code.
    """

    def __init__(self):
        # IDs that have been marked as 'used'.
        self.used_ = set()
        # All registered IDs, used or not.
        self.all_ = set()
        # Maps parameter name -> {configuration id -> value}.
        # Bug fix: the original initialized 'self.params' while get()/set()
        # read 'self.params_', so every parameter access raised
        # AttributeError.
        self.params_ = {}

    def register(self, id):
        """
        Registers a configuration.

        Returns True if the configuration has been added and False if
        it already exists. Reports an error if the configuration is 'used'.
        """
        if id in self.used_:
            #FIXME
            errors.error("common: the configuration '$(id)' is in use")

        if id not in self.all_:
            # Bug fix: the original used 'self.all_ += [id]', which raises
            # TypeError on a set.
            self.all_.add(id)

            # Indicate that a new configuration has been added.
            return True
        else:
            return False

    def use(self, id):
        """
        Mark a configuration as 'used'.

        Returns True if the state of the configuration has been changed to
        'used' and False if it the state wasn't changed. Reports an error
        if the configuration isn't known.
        """
        if id not in self.all_:
            #FIXME:
            errors.error("common: the configuration '$(id)' is not known")

        if id not in self.used_:
            # Bug fix: the original used 'self.used_ += [id]', which raises
            # TypeError on a set.
            self.used_.add(id)

            # indicate that the configuration has been marked as 'used'
            return True
        else:
            return False

    def all(self):
        """ Return all registered configurations. """
        return self.all_

    def used(self):
        """ Return all used configurations. """
        return self.used_

    def get(self, id, param):
        """ Returns the value of a configuration parameter, or None if it is
            not set.
        """
        # Bug fix: the original called the non-existent dict method
        # 'getdefault' and never returned the looked-up value.
        return self.params_.get(param, {}).get(id)

    def set (self, id, param, value):
        """ Sets the value of a configuration parameter. """
        self.params_.setdefault(param, {})[id] = value
|
||||
|
||||
# Ported from trunk@47174
|
||||
def check_init_parameters(toolset, requirement, *args):
    """ The rule for checking toolset parameters. Trailing parameters should all be
    parameter name/value pairs. The rule will check that each parameter either has
    a value in each invocation or has no value in each invocation. Also, the rule
    will check that the combination of all parameter values is unique in all
    invocations.

    Each parameter name corresponds to a subfeature. This rule will declare a
    subfeature the first time a non-empty parameter value is passed and will
    extend it with all the values.

    The return value from this rule is a condition to be used for flags settings.
    """
    # The type checking here is my best guess about
    # what the types should be.
    assert(isinstance(toolset, str))
    assert(isinstance(requirement, str) or requirement is None)
    # 'sig' accumulates toolset + all parameter values and is used to detect
    # duplicate initializations; 'condition' becomes the flag condition.
    sig = toolset
    condition = replace_grist(toolset, '<toolset>')
    subcondition = []

    for arg in args:
        assert(isinstance(arg, tuple))
        assert(len(arg) == 2)
        name = arg[0]
        value = arg[1]
        assert(isinstance(name, str))
        assert(isinstance(value, str) or value is None)

        # Key into the module-level bookkeeping dicts below.
        str_toolset_name = str((toolset, name))

        # FIXME: is this the correct translation?
        ### if $(value)-is-not-empty
        if value is not None:
            condition = condition + '-' + value
            # A parameter must be given a value either in every invocation
            # or in none of them; mixing the two styles is an error.
            if __had_unspecified_value.has_key(str_toolset_name):
                raise BaseException("'%s' initialization: parameter '%s' inconsistent\n" \
                "no value was specified in earlier initialization\n" \
                "an explicit value is specified now" % (toolset, name))

            # The logic below is for intel compiler. It calls this rule
            # with 'intel-linux' and 'intel-win' as toolset, so we need to
            # get the base part of toolset name.
            # We can't pass 'intel' as toolset, because it that case it will
            # be impossible to register versionles intel-linux and
            # intel-win of specific version.
            t = toolset
            m = __re__before_first_dash.match(toolset)
            if m:
                t = m.group(1)

            # Declare the subfeature lazily on first use, then remember that
            # this (toolset, name) pair has carried a value.
            if not __had_value.has_key(str_toolset_name):
                if not __declared_subfeature.has_key(str((t, name))):
                    feature.subfeature('toolset', t, name, [], ['propagated'])
                    __declared_subfeature[str((t, name))] = True

                __had_value[str_toolset_name] = True

            feature.extend_subfeature('toolset', t, name, [value])
            subcondition += ['<toolset-' + t + ':' + name + '>' + value ]

        else:
            if __had_value.has_key(str_toolset_name):
                raise BaseException ("'%s' initialization: parameter '%s' inconsistent\n" \
                "an explicit value was specified in an earlier initialization\n" \
                "no value is specified now" % (toolset, name))

            __had_unspecified_value[str_toolset_name] = True

        # Unspecified values contribute an empty component to the signature.
        if value == None: value = ''

        sig = sig + value + '-'

    # Reject an exact repeat of a previous initialization.
    if __all_signatures.has_key(sig):
        message = "duplicate initialization of '%s' with the following parameters: " % toolset

        for arg in args:
            name = arg[0]
            value = arg[1]
            if value == None: value = '<unspecified>'

            message += "'%s' = '%s'\n" % (name, value)

        raise BaseException(message)

    __all_signatures[sig] = True
    # FIXME
    __init_loc[sig] = "User location unknown" #[ errors.nearest-user-location ] ;

    # If we have a requirement, this version should only be applied under that
    # condition. To accomplish this we add a toolset requirement that imposes
    # the toolset subcondition, which encodes the version.
    if requirement:
        r = ['<toolset>' + toolset, requirement]
        r = ','.join(r)
        # NOTE(review): 'toolset' is a str parameter here, so this attribute
        # call looks like it was meant for the b2.build.toolset module, which
        # the name shadows -- confirm and rename one of the two.
        toolset.add_requirements([r + ':' + c for c in subcondition])

    # We add the requirements, if any, to the condition to scope the toolset
    # variables and options to this specific version.
    condition = [condition]
    if requirement:
        condition += [requirement]

    if __show_configuration:
        print "notice:", condition
    return ['/'.join(condition)]
|
||||
|
||||
# Ported from trunk@47077
|
||||
def get_invocation_command_nodefault(
|
||||
toolset, tool, user_provided_command=[], additional_paths=[], path_last=False):
|
||||
"""
|
||||
A helper rule to get the command to invoke some tool. If
|
||||
'user-provided-command' is not given, tries to find binary named 'tool' in
|
||||
PATH and in the passed 'additional-path'. Otherwise, verifies that the first
|
||||
element of 'user-provided-command' is an existing program.
|
||||
|
||||
This rule returns the command to be used when invoking the tool. If we can't
|
||||
find the tool, a warning is issued. If 'path-last' is specified, PATH is
|
||||
checked after 'additional-paths' when searching for 'tool'.
|
||||
"""
|
||||
assert(isinstance(toolset, str))
|
||||
assert(isinstance(tool, str))
|
||||
assert(isinstance(user_provided_command, list))
|
||||
if additional_paths is not None:
|
||||
assert(isinstance(additional_paths, list))
|
||||
assert(all([isinstance(path, str) for path in additional_paths]))
|
||||
assert(all(isinstance(path, str) for path in additional_paths))
|
||||
assert(isinstance(path_last, bool))
|
||||
|
||||
if not user_provided_command:
|
||||
command = find_tool(tool, additional_paths, path_last)
|
||||
if not command and __debug_configuration:
|
||||
print "warning: toolset", toolset, "initialization: can't find tool, tool"
|
||||
#FIXME
|
||||
#print "warning: initialized from" [ errors.nearest-user-location ] ;
|
||||
else:
|
||||
command = check_tool(user_provided_command)
|
||||
if not command and __debug_configuration:
|
||||
print "warning: toolset", toolset, "initialization:"
|
||||
print "warning: can't find user-provided command", user_provided_command
|
||||
#FIXME
|
||||
#ECHO "warning: initialized from" [ errors.nearest-user-location ]
|
||||
|
||||
assert(isinstance(command, str))
|
||||
|
||||
return command
|
||||
|
||||
# ported from trunk@47174
|
||||
def get_invocation_command(toolset, tool, user_provided_command = [],
    additional_paths = [], path_last = False):
    """ Same as get_invocation_command_nodefault, except that if no tool is found,
    returns either the user-provided-command, if present, or the 'tool' parameter.
    """

    assert(isinstance(toolset, str))
    assert(isinstance(tool, str))
    assert(isinstance(user_provided_command, list))
    if additional_paths is not None:
        assert(isinstance(additional_paths, list))
        assert(all([isinstance(path, str) for path in additional_paths]))
    assert(isinstance(path_last, bool))

    command = get_invocation_command_nodefault(
        toolset, tool, user_provided_command, additional_paths, path_last)

    # Nothing was found: fall back to the first user-supplied command word,
    # or failing that to the bare tool name.
    if not command:
        command = user_provided_command[0] if user_provided_command else tool

    assert(isinstance(command, str))

    return command
|
||||
|
||||
# ported from trunk@47281
|
||||
def get_absolute_tool_path(command):
    """
    Given an invocation command,
    return the absolute path to the command. This works even if the command
    has no path element and is present in PATH.
    """
    # A command that already carries a directory component is its own answer.
    if os.path.dirname(command):
        return os.path.dirname(command)
    else:
        # Otherwise search the system program path, trying both the bare name
        # and a Windows-style '.exe' variant.
        programs = path.programs_path()
        m = path.glob(programs, [command, command + '.exe' ])
        if not len(m):
            print "Could not find:", command, "in", programs
        # NOTE(review): when the glob finds nothing this still indexes m[0]
        # and raises IndexError right after printing -- confirm intent.
        return os.path.dirname(m[0])
|
||||
|
||||
# ported from trunk@47174
|
||||
def find_tool(name, additional_paths = [], path_last = False):
    """ Attempts to find tool (binary) named 'name' in PATH and in
    'additional-paths'. If found in path, returns 'name'. If
    found in additional paths, returns full name. If the tool
    is found in several directories, returns the first path found.
    Otherwise, returns the empty string. If 'path_last' is specified,
    path is checked after 'additional_paths'.
    """
    assert(isinstance(name, str))
    assert(isinstance(additional_paths, list))
    assert(isinstance(path_last, bool))

    # Probe for the bare name and the Windows executable variant in both
    # search spaces.
    candidates = [name, name + '.exe']
    in_path = path.glob(path.programs_path(), candidates)
    in_additional = path.glob(additional_paths, candidates)

    # Honour the requested search order; the first non-empty match wins.
    if path_last:
        found = in_additional or in_path
    else:
        found = in_path or in_additional

    return path.native(found[0]) if found else ''
|
||||
|
||||
#ported from trunk@47281
|
||||
def check_tool_aux(command):
    """ Checks if 'command' can be found either in path
    or is a full name to an existing file.

    Returns 'command' on success and None (implicitly) otherwise.
    """
    assert(isinstance(command, str))
    if os.path.dirname(command):
        # Qualified name: accept it if the file, or a variant the platform
        # would actually run, exists on disk.
        if os.path.exists(command):
            return command
        # Both NT and Cygwin will run .exe files by their unqualified names.
        if on_windows() and os.path.exists(command + '.exe'):
            return command
        # Only NT will run .bat files by their unqualified names.
        if os_name() == 'NT' and os.path.exists(command + '.bat'):
            return command
    else:
        # Unqualified name: see whether it resolves in the program path.
        if path.glob(path.programs_path(), [command]):
            return command
|
||||
|
||||
# ported from trunk@47281
|
||||
def check_tool(command):
    """ Checks that a tool can be invoked by 'command'.
    If command is not an absolute path, checks if it can be found in 'path'.
    If command is absolute path, check that it exists. Returns 'command'
    if ok and empty string otherwise.
    """
    assert(isinstance(command, list))
    for word in command:
        assert(isinstance(word, str))
    #FIXME: why do we check the first and last elements????
    if check_tool_aux(command[0]) or check_tool_aux(command[-1]):
        return command
|
||||
|
||||
# ported from trunk@47281
|
||||
def handle_options(tool, condition, command, options):
    """ Handle common options for toolset, specifically sets the following
    flag variables:
    - CONFIG_COMMAND to 'command'
    - OPTIONS for compile to the value of <compileflags> in options
    - OPTIONS for compile.c to the value of <cflags> in options
    - OPTIONS for compile.c++ to the value of <cxxflags> in options
    - OPTIONS for compile.fortran to the value of <fflags> in options
    - OPTIONS for link to the value of <linkflags> in options
    """
    from b2.build import toolset

    assert(isinstance(tool, str))
    assert(isinstance(condition, list))
    assert(isinstance(command, str))
    assert(isinstance(options, list))
    assert(command)
    toolset.flags(tool, 'CONFIG_COMMAND', condition, [command])

    # Map each tool sub-module to the feature whose values feed its OPTIONS.
    option_features = [
        ('.compile',         '<compileflags>'),
        ('.compile.c',       '<cflags>'),
        ('.compile.c++',     '<cxxflags>'),
        ('.compile.fortran', '<fflags>'),
        ('.link',            '<linkflags>'),
    ]
    for suffix, feature_name in option_features:
        toolset.flags(tool + suffix, 'OPTIONS', condition,
                      feature.get_values(feature_name, options))
|
||||
|
||||
# ported from trunk@47281
|
||||
def get_program_files_dir():
    """ returns the location of the "program files" directory on a windows
    platform
    """
    # bjam.variable yields a (possibly empty) list of strings.
    raw = bjam.variable("ProgramFiles")
    if not raw:
        return "c:\\Program Files"
    return ' '.join(raw)
|
||||
|
||||
# ported from trunk@47281
|
||||
def rm_command():
    """ Return the platform's file-removal shell command (set up in init()). """
    return __RM
|
||||
|
||||
# ported from trunk@47281
|
||||
def copy_command():
    """ Return the platform's file-copy shell command (set up in init()). """
    return __CP
|
||||
|
||||
# ported from trunk@47281
|
||||
def variable_setting_command(variable, value):
    """
    Returns the command needed to set an environment variable on the current
    platform. The variable setting persists through all following commands and is
    visible in the environment seen by subsequently executed commands. In other
    words, on Unix systems, the variable is exported, which is consistent with the
    only possible behavior on Windows systems.
    """
    assert(isinstance(variable, str))
    assert(isinstance(value, str))

    if os_name() == 'NT':
        return "set " + variable + "=" + value + os.linesep

    # On Unix: assign, then export, each terminated with the native newline.
    # The jam original had CYGWIN trouble because its newline variable held a
    # Windows "\r\n"; building the string from os.linesep here is believed to
    # behave correctly (several toolsets -- python, xsltproc, fop -- and the
    # path-variable-setting-command rule depend on this exact output format).
    assignment = variable + "=" + value + os.linesep
    export = "export " + variable + os.linesep
    return assignment + export
|
||||
|
||||
def path_variable_setting_command(variable, paths):
    """
    Returns a command to sets a named shell path variable to the given NATIVE
    paths on the current platform.
    """
    assert(isinstance(variable, str))
    assert(isinstance(paths, list))
    # Join with the platform's path-list separator (':' or ';').
    joined = os.path.pathsep.join(paths)
    return variable_setting_command(variable, joined)
|
||||
|
||||
def prepend_path_variable_command(variable, paths):
    """
    Returns a command that prepends the given paths to the named path variable on
    the current platform.
    """
    # Fixed: os.environ is a mapping, not callable -- the original
    # 'os.environ(variable)' raised TypeError on every call.  A missing or
    # empty variable contributes no components (instead of a spurious '').
    existing = os.environ.get(variable, '')
    existing_paths = existing.split(os.pathsep) if existing else []
    return path_variable_setting_command(variable, paths + existing_paths)
|
||||
|
||||
def file_creation_command():
    """
    Return a command which can create a file. If 'r' is result of invocation, then
    'r foobar' will create foobar with unspecified content. What happens if file
    already exists is unspecified.
    """
    # 'echo.' emits an empty line on Windows; 'touch' covers everything else.
    return "echo. > " if os_name() == 'NT' else "touch "
|
||||
|
||||
#FIXME: global variable
# Directories for which a mkdir action has already been scheduled (see mkdir()).
__mkdir_set = set()
# NOTE(review): as written this matches a literal '$' after ':' (r'\$'),
# which looks like an over-escaped port of the jam pattern for a Windows
# drive root such as "C:"; confirm whether r'^.*:$' was intended.
__re_windows_drive = re.compile(r'^.*:\$')
|
||||
|
||||
def mkdir(engine, target):
    """Schedule a build action that creates directory 'target', plus its
    parent directories recursively, registering each under the 'dirs' target.
    """
    # If dir exists, do not update it. Do this even for $(DOT).
    bjam.call('NOUPDATE', target)

    global __mkdir_set

    # FIXME: Where is DOT defined?
    #if $(<) != $(DOT) && ! $($(<)-mkdir):
    if target != '.' and target not in __mkdir_set:

        # Cheesy gate to prevent multiple invocations on same dir.
        __mkdir_set.add(target)

        # Schedule the mkdir build action.
        if os_name() == 'NT':
            engine.set_update_action("common.MkDir1-quick-fix-for-windows", target, [], None)
        else:
            engine.set_update_action("common.MkDir1-quick-fix-for-unix", target, [], None)

        # Prepare a Jam 'dirs' target that can be used to make the build only
        # construct all the target directories.
        engine.add_dependency('dirs', target)

        # Recursively create parent directories. $(<:P) = $(<)'s parent & we
        # recurse until root.

        s = os.path.dirname(target)
        if os_name() == 'NT':
            # Stop recursing once only a bare drive specification remains.
            if(__re_windows_drive.match(s)):
                s = ''

        if s:
            if s != target:
                # Parent must exist before the child can be created.
                engine.add_dependency(target, s)
                mkdir(engine, s)
            else:
                # dirname() reached a fixed point (e.g. '/'): mark it as a
                # pseudo-target rather than recursing forever.
                bjam.call('NOTFILE', s)
|
||||
|
||||
# Splits a dotted version string into (major, minor, optional patch) groups.
__re_version = re.compile(r'^([^.]+)[.]([^.]+)[.]?([^.]*)')
|
||||
|
||||
def format_name(format, name, target_type, prop_set):
    """ Given a target, as given to a custom tag rule, returns a string formatted
    according to the passed format. Format is a list of properties that is
    represented in the result. For each element of format the corresponding target
    information is obtained and added to the result string. For all, but the
    literal, the format value is taken as the as string to prepend to the output
    to join the item to the rest of the result. If not given "-" is used as a
    joiner.

    The format options can be:

      <base>[joiner]
          :: The basename of the target name.
      <toolset>[joiner]
          :: The abbreviated toolset tag being used to build the target.
      <threading>[joiner]
          :: Indication of a multi-threaded build.
      <runtime>[joiner]
          :: Collective tag of the build runtime.
      <version:/version-feature | X.Y[.Z]/>[joiner]
          :: Short version tag taken from the given "version-feature"
             in the build properties. Or if not present, the literal
             value as the version number.
      <property:/property-name/>[joiner]
          :: Direct lookup of the given property-name value in the
             build properties. /property-name/ is a regular expression.
             e.g. <property:toolset-.*:flavor> will match every toolset.
      /otherwise/
          :: The literal value of the format argument.

    For example this format:

        boost_ <base> <toolset> <threading> <runtime> <version:boost-version>

    Might return:

        boost_thread-vc80-mt-gd-1_33.dll, or
        boost_regex-vc80-gd-1_33.dll

    The returned name also has the target type specific prefix and suffix which
    puts it in a ready form to use as the value from a custom tag rule.
    """
    assert(isinstance(format, list))
    assert(isinstance(name, str))
    # Fixed: the original tested the imported 'type' module instead of the
    # 'target_type' parameter.
    assert(isinstance(target_type, str) or not target_type)
    # assert(isinstance(prop_set, property_set.PropertySet))
    if type.is_derived(target_type, 'LIB'):
        result = ""
        for f in format:
            grist = get_grist(f)
            if grist == '<base>':
                result += os.path.basename(name)
            elif grist == '<toolset>':
                result += join_tag(ungrist(f),
                    toolset_tag(name, target_type, prop_set))
            elif grist == '<threading>':
                result += join_tag(ungrist(f),
                    threading_tag(name, target_type, prop_set))
            elif grist == '<runtime>':
                result += join_tag(ungrist(f),
                    runtime_tag(name, target_type, prop_set))
            elif grist.startswith('<version:'):
                key = grist[len('<version:'):-1]
                # prop_set.get() returns a list of values; fall back to the
                # literal key text when the feature is not set.
                values = prop_set.get('<' + key + '>')
                version = __re_version.match(values[0] if values else key)
                # Fixed: match objects must be read via group(), not indexed.
                result += join_tag(ungrist(f),
                    version.group(1) + '_' + version.group(2))
            elif grist.startswith('<property:'):
                key = grist[len('<property:'):-1]
                property_re = re.compile('<(' + key + ')>')
                p0 = None
                for prop in prop_set.raw():
                    match = property_re.match(prop)
                    if match:
                        # Fixed: use group(1), not match[1].
                        p0 = match.group(1)
                        break
                if p0:
                    p = prop_set.get('<' + p0 + '>')
                    if p:
                        assert(len(p) == 1)
                        # Fixed: append the single value, not the list itself.
                        result += join_tag(ungrist(f), p[0])
            else:
                # No recognized grist: emit the literal (ungristed) text.
                result += ungrist(f)

        # Add the target-type specific prefix/suffix (e.g. 'lib' / '.dll').
        result = virtual_target.add_prefix_and_suffix(
            result, target_type, prop_set)
        return result
|
||||
|
||||
def join_tag(joiner, tag):
    """Return *tag* prefixed with *joiner*, defaulting the joiner to '-'."""
    return (joiner or '-') + tag
|
||||
|
||||
# Extracts major/minor digits from a '<toolset-*:version>' property string.
__re_toolset_version = re.compile(r"<toolset.*version>(\d+)[.](\d*)")
|
||||
|
||||
def toolset_tag(name, target_type, prop_set):
    """Return the abbreviated toolset tag (e.g. 'vc71', 'gcc42') for the
    toolset and version recorded in 'prop_set', for use in decorated library
    names.
    """
    tag = ''

    properties = prop_set.raw()
    tools = prop_set.get('<toolset>')
    # Fixed: the original asserted len(tools) == 0 and then indexed tools[0];
    # a toolset value must be present here.
    assert(len(tools) == 1)
    tools = tools[0]
    if tools.startswith('borland'): tag += 'bcb'
    elif tools.startswith('como'): tag += 'como'
    elif tools.startswith('cw'): tag += 'cw'
    elif tools.startswith('darwin'): tag += 'xgcc'
    # Fixed: 'edg' was an undefined bare name, not the string literal.
    elif tools.startswith('edg'): tag += 'edg'
    elif tools.startswith('gcc'):
        # prop_set.get() returns a list of values; a mingw flavor gets its
        # own tag.  (Also removed a stray no-op "''.find" statement here.)
        flavor = prop_set.get('<toolset-gcc:flavor>')
        flavor = flavor[0] if flavor else ''
        if flavor.find('mingw') != -1:
            tag += 'mgw'
        else:
            tag += 'gcc'
    elif tools == 'intel':
        if prop_set.get('<toolset-intel:platform>') == ['win']:
            tag += 'iw'
        else:
            tag += 'il'
    elif tools.startswith('kcc'): tag += 'kcc'
    elif tools.startswith('kylix'): tag += 'bck'
    #case metrowerks* : tag += cw ;
    #case mingw* : tag += mgw ;
    elif tools.startswith('mipspro'): tag += 'mp'
    elif tools.startswith('msvc'): tag += 'vc'
    elif tools.startswith('sun'): tag += 'sw'
    elif tools.startswith('tru64cxx'): tag += 'tru'
    elif tools.startswith('vacpp'): tag += 'xlc'

    # Find the '<toolset-*:version>' property, if any.
    version = None
    for prop in properties:
        match = __re_toolset_version.match(prop)
        if match:
            version = match
            break

    version_string = None
    # For historical reasons, vc6.0 and vc7.0 use different naming.
    if tag == 'vc':
        if version.group(1) == '6':
            # Cancel minor version.
            version_string = '6'
        elif version.group(1) == '7' and version.group(2) == '0':
            version_string = '7'

    # On intel, version is not added, because it does not matter and it's the
    # version of vc used as backend that matters. Ideally, we'd encode the
    # backend version but that would break compatibility with V1.
    elif tag == 'iw':
        version_string = ''

    # On borland, version is not added for compatibility with V1.
    elif tag == 'bcb':
        version_string = ''

    if version_string is None:
        # NOTE(review): assumes a version property was present; otherwise
        # 'version' is None and this raises -- confirm callers guarantee it.
        version_string = version.group(1) + version.group(2)

    # Fixed: the original appended the match object ('version') instead of
    # the computed version string when version_string was already set.
    tag += version_string

    return tag
|
||||
|
||||
|
||||
def threading_tag(name, target_type, prop_set):
    """Return 'mt' when the property set requests multi-threading, else ''."""
    if '<threading>multi' in prop_set.raw():
        return 'mt'
    return ''
|
||||
|
||||
|
||||
def runtime_tag(name, target_type, prop_set ):
    """Build the collective runtime tag (static link, runtime debugging,
    python debugging, debug variant, stlport and its iostream mode)."""
    props = prop_set.raw()
    parts = []

    if '<runtime-link>static' in props:
        parts.append('s')

    # This is an ugly thing. In V1, there's a code to automatically detect which
    # properties affect a target. So, if <runtime-debugging> does not affect gcc
    # toolset, the tag rules won't even see <runtime-debugging>. Similar
    # functionality in V2 is not implemented yet, so we just check for toolsets
    # which are known to care about runtime debug.
    cares_about_runtime_debug = (
        '<toolset>msvc' in props
        or '<stdlib>stlport' in props
        or '<toolset-intel:platform>win' in props)
    if cares_about_runtime_debug and '<runtime-debugging>on' in props:
        parts.append('g')

    if '<python-debugging>on' in props:
        parts.append('y')
    if '<variant>debug' in props:
        parts.append('d')
    if '<stdlib>stlport' in props:
        parts.append('p')
    if '<stdlib-stlport:iostream>hostios' in props:
        parts.append('n')

    return ''.join(parts)
|
||||
|
||||
|
||||
## TODO:
|
||||
##rule __test__ ( )
|
||||
##{
|
||||
## import assert ;
|
||||
##
|
||||
## local nl = "
|
||||
##" ;
|
||||
##
|
||||
## local save-os = [ modules.peek os : .name ] ;
|
||||
##
|
||||
## modules.poke os : .name : LINUX ;
|
||||
##
|
||||
## assert.result "PATH=foo:bar:baz$(nl)export PATH$(nl)"
|
||||
## : path-variable-setting-command PATH : foo bar baz ;
|
||||
##
|
||||
## assert.result "PATH=foo:bar:$PATH$(nl)export PATH$(nl)"
|
||||
## : prepend-path-variable-command PATH : foo bar ;
|
||||
##
|
||||
## modules.poke os : .name : NT ;
|
||||
##
|
||||
## assert.result "set PATH=foo;bar;baz$(nl)"
|
||||
## : path-variable-setting-command PATH : foo bar baz ;
|
||||
##
|
||||
## assert.result "set PATH=foo;bar;%PATH%$(nl)"
|
||||
## : prepend-path-variable-command PATH : foo bar ;
|
||||
##
|
||||
## modules.poke os : .name : $(save-os) ;
|
||||
##}
|
||||
|
||||
def init(manager):
    """Register this module's build actions and platform-specific shell
    commands with the build engine.  Must be called once with the manager.
    """
    engine = manager.engine()

    # Directory-creation actions referenced by mkdir() above.
    engine.register_action("common.MkDir1-quick-fix-for-unix", 'mkdir -p "$(<)"')
    engine.register_action("common.MkDir1-quick-fix-for-windows", 'if not exist "$(<)\\" mkdir "$(<)"')

    import b2.tools.make
    import b2.build.alias

    # Platform-dependent shell commands used by the actions below and exposed
    # via rm_command()/copy_command().
    global __RM, __CP, __IGNORE, __LN
    # ported from trunk@47281
    if os_name() == 'NT':
        __RM = 'del /f /q'
        __CP = 'copy'
        __IGNORE = '2>nul >nul & setlocal'
        # NT has no 'ln'; fall back to copying.
        __LN = __CP
        #if not __LN:
        #    __LN = CP
    else:
        __RM = 'rm -f'
        __CP = 'cp'
        __IGNORE = ''
        __LN = 'ln'

    engine.register_action("common.Clean", __RM + ' "$(>)"',
                           flags=['piecemeal', 'together', 'existing'])
    engine.register_action("common.copy", __CP + ' "$(>)" "$(<)"')
    engine.register_action("common.RmTemps", __RM + ' "$(>)" ' + __IGNORE,
                           flags=['quietly', 'updated', 'piecemeal', 'together'])

    # Remove any stale link target first, then (hard-)link source to target.
    engine.register_action("common.hard-link",
        __RM + ' "$(<)" 2$(NULL_OUT) $(NULL_OUT)' + os.linesep +
        __LN + ' "$(>)" "$(<)" $(NULL_OUT)')
|
||||
57
src/tools/darwin.py
Normal file
57
src/tools/darwin.py
Normal file
@@ -0,0 +1,57 @@
|
||||
# Copyright (C) Christopher Currie 2003. Permission to copy, use,
|
||||
# modify, sell and distribute this software is granted provided this
|
||||
# copyright notice appears in all copies. This software is provided
|
||||
# "as is" without express or implied warranty, and with no claim as to
|
||||
# its suitability for any purpose.
|
||||
|
||||
# Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/
|
||||
# for explanation why it's a separate toolset.
|
||||
|
||||
import common, gcc, builtin
|
||||
from b2.build import feature, toolset, type, action, generators
|
||||
from b2.util.utility import *
|
||||
|
||||
toolset.register ('darwin')
|
||||
|
||||
toolset.inherit_generators ('darwin', [], 'gcc')
|
||||
toolset.inherit_flags ('darwin', 'gcc')
|
||||
toolset.inherit_rules ('darwin', 'gcc')
|
||||
|
||||
def init (version = None, command = None, options = None):
    """Initialize the darwin toolset: register the configuration, locate the
    g++ command, apply the common option handling, and set up gcc link flags.
    """
    options = to_seq (options)

    condition = common.check_init_parameters ('darwin', None, ('version', version))

    command = common.get_invocation_command ('darwin', 'g++', command)

    common.handle_options ('darwin', condition, command, options)

    gcc.init_link_flags ('darwin', 'darwin', condition)
|
||||
|
||||
# Darwin has a different shared library suffix
|
||||
type.set_generated_target_suffix ('SHARED_LIB', ['<toolset>darwin'], 'dylib')
|
||||
|
||||
# we need to be able to tell the type of .dylib files
|
||||
type.register_suffixes ('dylib', 'SHARED_LIB')
|
||||
|
||||
feature.feature ('framework', [], ['free'])
|
||||
|
||||
toolset.flags ('darwin.compile', 'OPTIONS', '<link>shared', ['-dynamic'])
|
||||
toolset.flags ('darwin.compile', 'OPTIONS', None, ['-Wno-long-double', '-no-cpp-precomp'])
|
||||
toolset.flags ('darwin.compile.c++', 'OPTIONS', None, ['-fcoalesce-templates'])
|
||||
|
||||
toolset.flags ('darwin.link', 'FRAMEWORK', '<framework>')
|
||||
|
||||
# This is flag is useful for debugging the link step
|
||||
# uncomment to see what libtool is doing under the hood
|
||||
# toolset.flags ('darwin.link.dll', 'OPTIONS', None, '[-Wl,-v'])
|
||||
|
||||
action.register ('darwin.compile.cpp', None, ['$(CONFIG_COMMAND) $(ST_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)'])
|
||||
|
||||
# TODO: how to set 'bind LIBRARIES'?
|
||||
action.register ('darwin.link.dll', None, ['$(CONFIG_COMMAND) -dynamiclib -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)'])
|
||||
|
||||
def darwin_archive (manager, targets, sources, properties):
    # Python-side callback for the 'darwin.archive' action; the actual work is
    # done by the shell command registered with action.register, so there is
    # nothing to do here.
    pass
|
||||
|
||||
action.register ('darwin.archive', darwin_archive, ['ar -c -r -s $(ARFLAGS) "$(<:T)" "$(>:T)"'])
|
||||
796
src/tools/gcc.py
Normal file
796
src/tools/gcc.py
Normal file
@@ -0,0 +1,796 @@
|
||||
# Status: being ported by Steven Watanabe
|
||||
# Base revision: 47077
|
||||
# TODO: common.jam needs to be ported
|
||||
# TODO: generators.jam needs to have register_c_compiler.
|
||||
#
|
||||
# Copyright 2001 David Abrahams.
|
||||
# Copyright 2002-2006 Rene Rivera.
|
||||
# Copyright 2002-2003 Vladimir Prus.
|
||||
# Copyright (c) 2005 Reece H. Dunn.
|
||||
# Copyright 2006 Ilya Sokolov.
|
||||
# Copyright 2007 Roland Schwarz
|
||||
# Copyright 2007 Boris Gubenko.
|
||||
# Copyright 2008 Steven Watanabe
|
||||
#
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or copy at
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import re
|
||||
|
||||
import bjam
|
||||
|
||||
from b2.tools import unix, common, rc, pch, builtin
|
||||
from b2.build import feature, type, toolset, generators
|
||||
from b2.util.utility import os_name, on_windows
|
||||
from b2.manager import get_manager
|
||||
from b2.build.generators import Generator
|
||||
from b2.build.toolset import flags
|
||||
from b2.util.utility import to_seq
|
||||
|
||||
__debug = None
|
||||
|
||||
def debug():
    """Return True when --debug-configuration was passed to bjam.

    The answer is computed once from bjam's ARGV and cached in the module
    global '__debug'.
    """
    global __debug
    if __debug is None:
        __debug = "--debug-configuration" in bjam.variable("ARGV")
    return __debug
|
||||
|
||||
feature.extend('toolset', ['gcc'])
|
||||
|
||||
|
||||
toolset.inherit_generators('gcc', [], 'unix', ['unix.link', 'unix.link.dll'])
|
||||
toolset.inherit_flags('gcc', 'unix')
|
||||
toolset.inherit_rules('gcc', 'unix')
|
||||
|
||||
generators.override('gcc.prebuilt', 'builtin.prebuilt')
|
||||
generators.override('gcc.searched-lib-generator', 'searched-lib-generator')
|
||||
|
||||
# Target naming is determined by types/lib.jam and the settings below this
|
||||
# comment.
|
||||
#
|
||||
# On *nix:
|
||||
# libxxx.a static library
|
||||
# libxxx.so shared library
|
||||
#
|
||||
# On windows (mingw):
|
||||
# libxxx.lib static library
|
||||
# xxx.dll DLL
|
||||
# xxx.lib import library
|
||||
#
|
||||
# On windows (cygwin) i.e. <target-os>cygwin
|
||||
# libxxx.a static library
|
||||
# xxx.dll DLL
|
||||
# libxxx.dll.a import library
|
||||
#
|
||||
# Note: user can always override by using the <tag>@rule
|
||||
# This settings have been choosen, so that mingw
|
||||
# is in line with msvc naming conventions. For
|
||||
# cygwin the cygwin naming convention has been choosen.
|
||||
|
||||
# Make the "o" suffix used for gcc toolset on all
|
||||
# platforms
|
||||
type.set_generated_target_suffix('OBJ', ['<toolset>gcc'], 'o')
|
||||
type.set_generated_target_suffix('STATIC_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'a')
|
||||
|
||||
type.set_generated_target_suffix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'dll.a')
|
||||
type.set_generated_target_prefix('IMPORT_LIB', ['<toolset>gcc', '<target-os>cygwin'], 'lib')
|
||||
|
||||
__machine_match = re.compile('^([^ ]+)')
|
||||
__version_match = re.compile('^([0-9.]+)')
|
||||
|
||||
def init(version = None, command = None, options = None):
    """
    Initializes the gcc toolset for the given version. If necessary, command may
    be used to specify where the compiler is located. The parameter 'options' is a
    space-delimited list of options, each one specified as
    <option-name>option-value. Valid option names are: cxxflags, linkflags and
    linker-type. Accepted linker-type values are gnu, darwin, osf, hpux or sun
    and the default value will be selected based on the current OS.
    Example:
      using gcc : 3.4 : : <cxxflags>foo <linkflags>bar <linker-type>sun ;
    """

    options = to_seq(options)
    command = to_seq(command)

    # Information about the gcc command...
    # The command.
    command = to_seq(common.get_invocation_command('gcc', 'g++', command))
    # The root directory of the tool install.
    root = feature.get_values('<root>', options) ;
    # The bin directory where to find the command to execute.
    bin = None
    # The flavor of compiler.
    flavor = feature.get_values('<flavor>', options)
    # Autodetect the root and bin dir if not given.
    if command:
        if not bin:
            bin = common.get_absolute_tool_path(command[-1])
        if not root:
            root = os.path.dirname(bin)
    # Autodetect the version and flavor if not given.
    if command:
        # Ask the compiler itself; -dumpmachine prints the target triplet and
        # -dumpversion the compiler version.
        machine_info = subprocess.Popen(command + ['-dumpmachine'], stdout=subprocess.PIPE).communicate()[0]
        machine = __machine_match.search(machine_info).group(1)

        version_info = subprocess.Popen(command + ['-dumpversion'], stdout=subprocess.PIPE).communicate()[0]
        version = __version_match.search(version_info).group(1)
        if not flavor and machine.find('mingw') != -1:
            flavor = 'mingw'

    # The property condition under which all flags configured below apply.
    condition = None
    if flavor:
        condition = common.check_init_parameters('gcc', None,
            ('version', version),
            ('flavor', flavor))
    else:
        condition = common.check_init_parameters('gcc', None,
            ('version', version))

    if command:
        command = command[0]

    common.handle_options('gcc', condition, command, options)

    linker = feature.get_values('<linker-type>', options)
    if not linker:
        if os_name() == 'OSF':
            linker = 'osf'
        elif os_name() == 'HPUX':
            linker = 'hpux' ;
        else:
            linker = 'gnu'

    init_link_flags('gcc', linker, condition)

    # If gcc is installed in non-standard location, we'd need to add
    # LD_LIBRARY_PATH when running programs created with it (for unit-test/run
    # rules).
    if command:
        # On multilib 64-bit boxes, there are both 32-bit and 64-bit libraries
        # and all must be added to LD_LIBRARY_PATH. The linker will pick the
        # right ones. Note that we don't provide a clean way to build 32-bit
        # binary with 64-bit compiler, but user can always pass -m32 manually.
        lib_path = [os.path.join(root, 'bin'),
                    os.path.join(root, 'lib'),
                    os.path.join(root, 'lib32'),
                    os.path.join(root, 'lib64')]
        if debug():
            print 'notice: using gcc libraries ::', condition, '::', lib_path
        toolset.flags('gcc.link', 'RUN_PATH', condition, lib_path)

    # If it's not a system gcc install we should adjust the various programs as
    # needed to prefer using the install specific versions. This is essential
    # for correct use of MinGW and for cross-compiling.

    # - The archive builder.
    # NOTE(review): when no command was given, 'bin' is still None here, so
    # [bin] contains None -- presumably get_invocation_command tolerates that;
    # confirm against common.py.
    archiver = common.get_invocation_command('gcc',
        'ar', feature.get_values('<archiver>', options), [bin], path_last=True)
    toolset.flags('gcc.archive', '.AR', condition, [archiver])
    if debug():
        print 'notice: using gcc archiver ::', condition, '::', archiver

    # - The resource compiler.
    rc_command = common.get_invocation_command_nodefault('gcc',
        'windres', feature.get_values('<rc>', options), [bin], path_last=True)
    rc_type = feature.get_values('<rc-type>', options)

    if not rc_type:
        rc_type = 'windres'

    if not rc_command:
        # If we can't find an RC compiler we fallback to a null RC compiler that
        # creates empty object files. This allows the same Jamfiles to work
        # across the board. The null RC uses the assembler to create the empty
        # objects, so configure that.
        rc_command = common.get_invocation_command('gcc', 'as', [], [bin], path_last=True)
        rc_type = 'null'
    rc.configure(rc_command, condition, '<rc-type>' + rc_type)
|
||||
|
||||
# Unported Jam logic, kept for reference:
###if [ os.name ] = NT
###{
###    # This causes single-line command invocation to not go through .bat files,
###    # thus avoiding command-line length limitations.
###    JAMSHELL = % ;
###}

#FIXME: when register_c_compiler is moved to
# generators, these should be updated
builtin.register_c_compiler('gcc.compile.c++', ['CPP'], ['OBJ'], ['<toolset>gcc'])
builtin.register_c_compiler('gcc.compile.c', ['C'], ['OBJ'], ['<toolset>gcc'])
builtin.register_c_compiler('gcc.compile.asm', ['ASM'], ['OBJ'], ['<toolset>gcc'])

# pch support

# The compiler looks for a precompiled header in each directory just before it
# looks for the include file in that directory. The name searched for is the
# name specified in the #include directive with ".gch" suffix appended. The
# logic in gcc-pch-generator will make sure that BASE_PCH suffix is appended to
# full name of the header.

type.set_generated_target_suffix('PCH', ['<toolset>gcc'], 'gch')
|
||||
|
||||
# GCC-specific pch generator.
|
||||
# GCC-specific pch generator.
class GccPchGenerator(pch.PchGenerator):
    """Precompiled-header generator for gcc.

    gcc searches for '<header>.gch' next to the header itself, so the PCH
    target keeps the header's own file name (see generated_targets below).
    """

    # Inherit the __init__ method

    def run_pch(self, project, name, prop_set, sources):
        """Build the PCH from the single header found in 'sources'.

        Any CPP sources are ignored. Returns a pair of (usage-requirements
        property set carrying <pch-file> and -Winvalid-pch, pch targets).
        Raises Exception when no header is present or when the pch target
        name does not match the header's base name.
        """
        # Find the header in sources. Ignore any CPP sources.
        header = None
        for s in sources:
            if type.is_derived(s.type, 'H'):
                header = s

        # Fixed: previously a missing header fell through to 'header.name'
        # below and died with an AttributeError on None.
        if header is None:
            raise Exception(
                "pch target '%s': no header file among the sources" % name)

        # Error handling: Base header file name should be the same as the base
        # precompiled header name.
        header_name = header.name
        header_basename = os.path.basename(header_name).rsplit('.', 1)[0]
        if header_basename != name:
            location = project.project_module
            ###FIXME: should become errors.user_error once that is ported.
            # Fixed: was a bare 'raise Exception()' with the message left
            # behind only as a Jam comment.
            raise Exception(
                "in %s: pch target name '%s' should be the same as the base "
                "name of header file '%s'" % (location, name, header_name))

        pch_file = Generator.run(self, project, name, prop_set, [header])

        # return result of base class and pch-file property as usage-requirements
        # FIXME: what about multiple results from generator.run?
        return (property_set.create('<pch-file>' + pch_file[0], '<cflags>-Winvalid-pch'),
                pch_file)

    # Calls the base version specifying source's name as the name of the created
    # target. As result, the PCH will be named whatever.hpp.gch, and not
    # whatever.gch.
    def generated_targets(self, sources, prop_set, project, name = None):
        name = sources[0].name
        return Generator.generated_targets(self, sources,
            prop_set, project, name)
|
||||
|
||||
# Note: the 'H' source type will catch both '.h' header and '.hpp' header. The
# latter have HPP type, but HPP type is derived from H. The type of compilation
# is determined entirely by the destination type.
generators.register(GccPchGenerator('gcc.compile.c.pch', False, ['H'], ['C_PCH'], ['<pch>on', '<toolset>gcc' ]))
generators.register(GccPchGenerator('gcc.compile.c++.pch', False, ['H'], ['CPP_PCH'], ['<pch>on', '<toolset>gcc' ]))

# Override default do-nothing generators.
generators.override('gcc.compile.c.pch', 'pch.default-c-pch-generator')
generators.override('gcc.compile.c++.pch', 'pch.default-cpp-pch-generator')

# The PCH file produced by the generators above; consumed by the compile
# actions through -I"$(PCH_FILE:D)".
flags('gcc.compile', 'PCH_FILE', ['<pch>on'], ['<pch-file>'])

# Declare flags and action for compilation
flags('gcc.compile', 'OPTIONS', ['<optimization>off'], ['-O0'])
flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])
flags('gcc.compile', 'OPTIONS', ['<optimization>space'], ['-Os'])

flags('gcc.compile', 'OPTIONS', ['<inlining>off'], ['-fno-inline'])
flags('gcc.compile', 'OPTIONS', ['<inlining>on'], ['-Wno-inline'])
flags('gcc.compile', 'OPTIONS', ['<inlining>full'], ['-finline-functions', '-Wno-inline'])

flags('gcc.compile', 'OPTIONS', ['<warnings>off'], ['-w'])
flags('gcc.compile', 'OPTIONS', ['<warnings>on'], ['-Wall'])
flags('gcc.compile', 'OPTIONS', ['<warnings>all'], ['-Wall', '-pedantic'])
flags('gcc.compile', 'OPTIONS', ['<warnings-as-errors>on'], ['-Werror'])

flags('gcc.compile', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
flags('gcc.compile', 'OPTIONS', ['<profiling>on'], ['-pg'])
flags('gcc.compile', 'OPTIONS', ['<rtti>off'], ['-fno-rtti'])
|
||||
|
||||
# On cygwin and mingw, gcc generates position independent code by default, and
# warns if -fPIC is specified. This might not be the right way of checking if
# we're using cygwin. For example, it's possible to run cygwin gcc from NT
# shell, or using crosscompiling. But we'll solve that problem when it's time.
# In that case we'll just add another parameter to 'init' and move this logic
# inside 'init'.
if not os_name() in ['CYGWIN', 'NT']:
    # Fixed: removed a leftover debug statement (print "osname:", os_name())
    # that wrote to stdout on every non-Windows build.
    #
    # This logic will add -fPIC for all compilations:
    #
    # lib a : a.cpp b ;
    # obj b : b.cpp ;
    # exe c : c.cpp a d ;
    # obj d : d.cpp ;
    #
    # This all is fine, except that 'd' will be compiled with -fPIC even though
    # it's not needed, as 'd' is used only in exe. However, it's hard to detect
    # where a target is going to be used. Alternative, we can set -fPIC only
    # when main target type is LIB but than 'b' will be compiled without -fPIC.
    # In x86-64 that will lead to link errors. So, compile everything with
    # -fPIC.
    #
    # Yet another alternative would be to create propagated <sharedable>
    # feature, and set it when building shared libraries, but that's hard to
    # implement and will increase target path length even more.
    flags('gcc.compile', 'OPTIONS', ['<link>shared'], ['-fPIC'])
|
||||
|
||||
if os_name() != 'NT' and os_name() != 'OSF' and os_name() != 'HPUX':
    # OSF does have an option called -soname but it doesn't seem to work as
    # expected, therefore it has been disabled.
    # NOTE(review): these module-level names are only bound on platforms that
    # pass the check above. The link.dll action references $(HAVE_SONAME) and
    # $(SONAME_OPTION) as bjam variables, which presumably expand to nothing
    # when unset -- confirm they are exported to the engine somewhere.
    HAVE_SONAME = ''
    SONAME_OPTION = '-h'
|
||||
|
||||
|
||||
# Free-form user options: <cflags> applies to both C and C++ compiles,
# <cxxflags> to C++ only. <define> and <include> feed the -D and -I switches
# in the actions below.
flags('gcc.compile', 'USER_OPTIONS', [], ['<cflags>'])
flags('gcc.compile.c++', 'USER_OPTIONS',[], ['<cxxflags>'])
flags('gcc.compile', 'DEFINES', [], ['<define>'])
flags('gcc.compile', 'INCLUDES', [], ['<include>'])

engine = get_manager().engine()

# Precompiled-header build actions: compile the header itself with the
# matching -x <lang>-header switch.
engine.register_action('gcc.compile.c++.pch',
    '"$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')

engine.register_action('gcc.compile.c.pch',
    '"$(CONFIG_COMMAND)" -x c-header $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"')
|
||||
|
||||
|
||||
def gcc_compile_cpp(targets, sources, properties):
    """Per-target setup for the gcc.compile.c++ action.

    Chooses the -x c++ language switch when the source extension alone would
    not make gcc compile it as C++, and makes the target depend on its bound
    PCH file (if any).
    """
    # Fixed for consistency with gcc_compile_c: bind the engine once instead
    # of mixing get_manager().engine() with the module-level 'engine' global.
    engine = get_manager().engine()
    # Some extensions are compiled as C++ by default. For others, we need to
    # pass -x c++. We could always pass -x c++ but distcc does not work with it.
    extension = os.path.splitext (sources [0]) [1]
    lang = ''
    if not extension in ['.cc', '.cp', '.cxx', '.cpp', '.c++', '.C']:
        lang = '-x c++'
    engine.set_target_variable (targets, 'LANG', lang)
    engine.add_dependency(targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
|
||||
|
||||
def gcc_compile_c(targets, sources, properties):
    """Per-target setup for the gcc.compile.c action.

    Forces the C language switch and makes the target depend on its bound
    PCH file (if any).
    """
    engine = get_manager().engine()
    # If we use the name g++ then default file suffix -> language mapping does
    # not work. So have to pass -x option. Maybe, we can work around this by
    # allowing the user to specify both C and C++ compiler names.
    #if $(>:S) != .c
    #{
    engine.set_target_variable(targets, 'LANG', '-x c')
    #}
    engine.add_dependency(
        targets, bjam.call('get-target-variable', targets, 'PCH_FILE'))
|
||||
|
||||
# The compile actions proper. The setup functions above run first and set
# $(LANG); PCH_FILE is in bound_list so the engine binds it to a real path
# before $(PCH_FILE:D) is expanded.
engine.register_action(
    'gcc.compile.c++',
    '"$(CONFIG_COMMAND)" $(LANG) -ftemplate-depth-128 $(OPTIONS) ' +
    '$(USER_OPTIONS) -D$(DEFINES) -I"$(PCH_FILE:D)" -I"$(INCLUDES)" ' +
    '-c -o "$(<:W)" "$(>:W)"',
    function=gcc_compile_cpp,
    bound_list=['PCH_FILE'])

engine.register_action(
    'gcc.compile.c',
    '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) ' +
    '-I"$(PCH_FILE:D)" -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
    function=gcc_compile_c,
    bound_list=['PCH_FILE'])
|
||||
|
||||
def gcc_compile_asm(targets, sources, properties):
    # Assembler sources still go through the compiler driver, with C
    # preprocessing enabled.
    get_manager().engine().set_target_variable(targets, 'LANG', '-x assembler-with-cpp')

engine.register_action(
    'gcc.compile.asm',
    '"$(CONFIG_COMMAND)" $(LANG) $(OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -o "$(<)" "$(>)"',
    function=gcc_compile_asm)
|
||||
|
||||
|
||||
class GccLinkingGenerator(unix.UnixLinkingGenerator):
    """
    Linking generator that checks we don't try to use the
    <runtime-link>static property while creating or using a shared library,
    since that is not supported by gcc/libc.
    """
    def run(self, project, name, prop_set, sources):
        # TODO: Replace this with the use of a target-os property.

        no_static_link = False
        if bjam.variable('UNIX'):
            no_static_link = True;
        ##FIXME: what does this mean?
##        {
##            switch [ modules.peek : JAMUNAME ]
##            {
##                case * : no-static-link = true ;
##            }
##        }

        properties = prop_set.raw()
        reason = None
        if no_static_link and '<runtime-link>static' in properties:
            if '<link>shared' in properties:
                reason = "On gcc, DLL can't be build with '<runtime-link>static'."
            elif type.is_derived(self.target_types[0], 'EXE'):
                for s in sources:
                    # NOTE(review): here 'type' is called as a method
                    # (s.type()) while GccPchGenerator.run_pch reads it as an
                    # attribute (s.type) -- one of the two is presumably wrong;
                    # confirm against virtual_target.py.
                    source_type = s.type()
                    if source_type and type.is_derived(source_type, 'SHARED_LIB'):
                        reason = "On gcc, using DLLS together with the " +\
                                 "<runtime-link>static options is not possible "
        if reason:
            # Warn and produce nothing (implicitly returns None).
            print 'warning:', reason
            print 'warning:',\
                "It is suggested to use '<runtime-link>static' together",\
                "with '<link>static'." ;
            return
        else:
            generated_targets = unix.UnixLinkingGenerator.run(self, project,
                name, prop_set, sources)
            return generated_targets
|
||||
|
||||
if on_windows():
    # On Windows an import library is emitted next to the DLL; the
    # .IMPLIB-COMMAND flag is spliced into the gcc.link.dll action below.
    flags('gcc.link.dll', '.IMPLIB-COMMAND', [], ['-Wl,--out-implib,'])
    generators.register(
        GccLinkingGenerator('gcc.link', True,
            ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
            [ 'EXE' ],
            [ '<toolset>gcc' ]))
    generators.register(
        GccLinkingGenerator('gcc.link.dll', True,
            ['OBJ', 'SEARCHED_LIB', 'STATIC_LIB', 'IMPORT_LIB'],
            ['IMPORT_LIB', 'SHARED_LIB'],
            ['<toolset>gcc']))
else:
    generators.register(
        GccLinkingGenerator('gcc.link', True,
            ['LIB', 'OBJ'],
            ['EXE'],
            ['<toolset>gcc']))
    generators.register(
        GccLinkingGenerator('gcc.link.dll', True,
            ['LIB', 'OBJ'],
            ['SHARED_LIB'],
            ['<toolset>gcc']))
|
||||
|
||||
# Declare flags for linking.
# First, the common flags.
flags('gcc.link', 'OPTIONS', ['<debug-symbols>on'], ['-g'])
flags('gcc.link', 'OPTIONS', ['<profiling>on'], ['-pg'])
flags('gcc.link', 'USER_OPTIONS', [], ['<linkflags>'])
flags('gcc.link', 'LINKPATH', [], ['<library-path>'])
flags('gcc.link', 'FINDLIBS-ST', [], ['<find-static-library>'])
flags('gcc.link', 'FINDLIBS-SA', [], ['<find-shared-library>'])
flags('gcc.link', 'LIBRARIES', [], ['<library-file>'])

# For <runtime-link>static we made sure there are no dynamic libraries in the
# link. On HP-UX not all system libraries exist as archived libraries (for
# example, there is no libunwind.a), so, on this platform, the -static option
# cannot be specified.
if os_name() != 'HPUX':
    flags('gcc.link', 'OPTIONS', ['<runtime-link>static'], ['-static'])
|
||||
|
||||
# Now, the vendor specific flags.
|
||||
# The parameter linker can be either gnu, darwin, osf, hpux or sun.
|
||||
def init_link_flags(toolset, linker, condition):
    """
    Declare the vendor-specific link flags for 'toolset' under 'condition'.
    The parameter linker can be either gnu, darwin, osf, hpux or sun.
    Raises a user error for any other linker value.
    """
    toolset_link = toolset + '.link'
    if linker == 'gnu':
        # Strip the binary when no debugging is needed. We use --strip-all flag
        # as opposed to -s since icc (intel's compiler) is generally
        # option-compatible with and inherits from the gcc toolset, but does not
        # support -s.

        # FIXME: what does unchecked translate to?
        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,--strip-all']) # : unchecked ;
        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
        flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;
        flags(toolset_link, 'START-GROUP', condition, ['-Wl,--start-group'])# : unchecked ;
        flags(toolset_link, 'END-GROUP', condition, ['-Wl,--end-group']) # : unchecked ;

        # gnu ld has the ability to change the search behaviour for libraries
        # referenced by -l switch. These modifiers are -Bstatic and -Bdynamic
        # and change search for -l switches that follow them. The following list
        # shows the tried variants.
        # The search stops at the first variant that has a match.
        # *nix: -Bstatic -lxxx
        #     libxxx.a
        #
        # *nix: -Bdynamic -lxxx
        #     libxxx.so
        #     libxxx.a
        #
        # windows (mingw,cygwin) -Bstatic -lxxx
        #     libxxx.a
        #     xxx.lib
        #
        # windows (mingw,cygwin) -Bdynamic -lxxx
        #     libxxx.dll.a
        #     xxx.dll.a
        #     libxxx.a
        #     xxx.lib
        #     cygxxx.dll (*)
        #     libxxx.dll
        #     xxx.dll
        #     libxxx.a
        #
        # (*) This is for cygwin
        # Please note that -Bstatic and -Bdynamic are not a guarantee that a
        # static or dynamic lib indeed gets linked in. The switches only change
        # search patterns!

        # On *nix mixing shared libs with static runtime is not a good idea.
        flags(toolset_link, 'FINDLIBS-ST-PFX',
            map(lambda x: x + '/<runtime-link>shared', condition),
            ['-Wl,-Bstatic']) # : unchecked ;
        flags(toolset_link, 'FINDLIBS-SA-PFX',
            map(lambda x: x + '/<runtime-link>shared', condition),
            ['-Wl,-Bdynamic']) # : unchecked ;

        # On windows allow mixing of static and dynamic libs with static
        # runtime.
        flags(toolset_link, 'FINDLIBS-ST-PFX',
            map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
            ['-Wl,-Bstatic']) # : unchecked ;
        flags(toolset_link, 'FINDLIBS-SA-PFX',
            map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
            ['-Wl,-Bdynamic']) # : unchecked ;
        flags(toolset_link, 'OPTIONS',
            map(lambda x: x + '/<runtime-link>static/<target-os>windows', condition),
            ['-Wl,-Bstatic']) # : unchecked ;

    elif linker == 'darwin':
        # On Darwin, the -s option to ld does not work unless we pass -static,
        # and passing -static unconditionally is a bad idea. So, don't pass -s.
        # at all, darwin.jam will use separate 'strip' invocation.
        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
        flags(toolset_link, 'RPATH_LINK', condition, ['<xdll-path>']) # : unchecked ;

    elif linker == 'osf':
        # No --strip-all, just -s.
        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
        # : unchecked ;
        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
        # This does not supports -R.
        flags(toolset_link, 'RPATH_OPTION', condition, ['-rpath']) # : unchecked ;
        # -rpath-link is not supported at all.

    elif linker == 'sun':
        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition), ['-Wl,-s'])
        # : unchecked ;
        flags(toolset_link, 'RPATH', condition, ['<dll-path>']) # : unchecked ;
        # Solaris linker does not have a separate -rpath-link, but allows to use
        # -L for the same purpose.
        flags(toolset_link, 'LINKPATH', condition, ['<xdll-path>']) # : unchecked ;

        # This permits shared libraries with non-PIC code on Solaris.
        # VP, 2004/09/07: Now that we have -fPIC hardcode in link.dll, the
        # following is not needed. Whether -fPIC should be hardcoded, is a
        # separate question.
        # AH, 2004/10/16: it is still necessary because some tests link against
        # static libraries that were compiled without PIC.
        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition), ['-mimpure-text'])
        # : unchecked ;

    elif linker == 'hpux':
        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<debug-symbols>off', condition),
            ['-Wl,-s']) # : unchecked ;
        flags(toolset_link, 'OPTIONS', map(lambda x: x + '/<link>shared', condition),
            ['-fPIC']) # : unchecked ;

    else:
        # Fixed: the message previously contained Jam-style $(toolset) and
        # $(linker) placeholders inside a Python string, which would have been
        # shown to the user verbatim; format the values explicitly instead.
        errors.user_error(
            "%s initialization: invalid linker '%s'. " % (toolset, linker) +
            "The value '%s' specified for <linker> is not recognized. " % linker +
            "Possible values are 'gnu', 'darwin', 'osf', 'hpux' or 'sun'")
|
||||
|
||||
# Declare actions for linking.
def gcc_link(targets, sources, properties):
    """Per-target setup for the gcc.link action."""
    engine = get_manager().engine()
    # SPACE is expanded between option and argument in the action string.
    engine.set_target_variable(targets, 'SPACE', ' ')
    # Serialize execution of the 'link' action, since running N links in
    # parallel is just slower. For now, serialize only gcc links, it might be a
    # good idea to serialize all links.
    engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')

engine.register_action(
    'gcc.link',
    '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
    '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
    '-Wl,-rpath-link$(SPACE)-Wl,"$(RPATH_LINK)" -o "$(<)" ' +
    '$(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
    '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
    '$(OPTIONS) $(USER_OPTIONS)',
    function=gcc_link,
    bound_list=['LIBRARIES'])
|
||||
|
||||
# Default value. Mostly for the sake of intel-linux that inherits from gcc, but
# does not have the same logic to set the .AR variable. We can put the same
# logic in intel-linux, but that's hardly worth the trouble as on Linux, 'ar' is
# always available.
__AR = 'ar'

flags('gcc.archive', 'AROPTIONS', [], ['<archiveflags>'])
|
||||
|
||||
def gcc_archive(targets, sources, properties):
    """Per-target setup for gcc.archive: remove the old archive first.

    Always remove archive and start again. Here's rationale from

    Andre Hentz:

    I had a file, say a1.c, that was included into liba.a. I moved a1.c to
    a2.c, updated my Jamfiles and rebuilt. My program was crashing with absurd
    errors. After some debugging I traced it back to the fact that a1.o was
    *still* in liba.a

    Rene Rivera:

    Originally removing the archive was done by splicing an RM onto the
    archive action. That makes archives fail to build on NT when they have
    many files because it will no longer execute the action directly and blow
    the line length limit. Instead we remove the file in a different action,
    just before building the archive.
    """
    clean = targets[0] + '(clean)'
    bjam.call('TEMPORARY', clean)
    bjam.call('NOCARE', clean)
    engine = get_manager().engine()
    # The (clean) pseudo-target must be located where the archive is.
    # Fixed: argument order is (targets, variable, value), matching every
    # other set_target_variable call in this module; the original passed
    # ('LOCATE', clean, ...), i.e. set a variable named by 'clean' on a
    # target literally called 'LOCATE'.
    engine.set_target_variable(clean, 'LOCATE', bjam.call('get-target-variable', targets, 'LOCATE'))
    engine.add_dependency(clean, sources)
    engine.add_dependency(targets, clean)
    engine.set_update_action('common.RmTemps', clean, targets, None)
|
||||
|
||||
# Declare action for creating static libraries.
# The letter 'r' means to add files to the archive with replacement. Since we
# remove archive, we don't care about replacement, but there's no option "add
# without replacement".
# The letter 'c' suppresses the warning in case the archive does not exist yet.
# That warning is produced only on some platforms, for whatever reasons.
engine.register_action('gcc.archive',
    '"$(.AR)" $(AROPTIONS) rc "$(<)" "$(>)"',
    function=gcc_archive,
    flags=['piecemeal'])
|
||||
|
||||
def gcc_link_dll(targets, sources, properties):
    """Per-target setup for the gcc.link.dll action (see gcc_link above)."""
    engine = get_manager().engine()
    engine.set_target_variable(targets, 'SPACE', ' ')
    # Serialize gcc links; see the rationale at gcc_link.
    engine.set_target_variable(targets, 'JAM_SEMAPHORE', '<s>gcc-link-semaphore')

engine.register_action(
    'gcc.link.dll',
    # Differ from 'link' above only by -shared.
    '"$(CONFIG_COMMAND)" -L"$(LINKPATH)" ' +
    '-Wl,$(RPATH_OPTION:E=-R)$(SPACE)-Wl,"$(RPATH)" ' +
    '"$(.IMPLIB-COMMAND)$(<[1])" -o "$(<[-1])" ' +
    '$(HAVE_SONAME)-Wl,$(SONAME_OPTION)$(SPACE)-Wl,$(<[-1]:D=) ' +
    '-shared $(START-GROUP) "$(>)" "$(LIBRARIES)" $(FINDLIBS-ST-PFX) ' +
    '-l$(FINDLIBS-ST) $(FINDLIBS-SA-PFX) -l$(FINDLIBS-SA) $(END-GROUP) ' +
    '$(OPTIONS) $(USER_OPTIONS)',
    function = gcc_link_dll,
    bound_list=['LIBRARIES'])
|
||||
|
||||
# Set up threading support. It's somewhat contrived, so perform it at the end,
# to avoid cluttering other code.

if on_windows():
    flags('gcc', 'OPTIONS', ['<threading>multi'], ['-mthreads'])
elif bjam.variable('UNIX'):
    # JAMUNAME holds the host uname; the first element is the OS name.
    jamuname = bjam.variable('JAMUNAME')
    host_os_name = jamuname[0]
    if host_os_name.startswith('SunOS'):
        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthreads'])
        flags('gcc', 'FINDLIBS-SA', [], ['rt'])
    elif host_os_name == 'BeOS':
        # BeOS has no threading options, don't set anything here.
        pass
    elif host_os_name.endswith('BSD'):
        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
        # there is no -lrt on BSD
    elif host_os_name == 'DragonFly':
        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
        # there is no -lrt on BSD - DragonFly is a FreeBSD variant,
        # which annoyingly doesn't say it's a *BSD.
    elif host_os_name == 'IRIX':
        # gcc on IRIX does not support multi-threading, don't set anything here.
        pass
    elif host_os_name == 'Darwin':
        # Darwin has no threading options, don't set anything here.
        pass
    else:
        flags('gcc', 'OPTIONS', ['<threading>multi'], ['-pthread'])
        flags('gcc', 'FINDLIBS-SA', [], ['rt'])
|
||||
|
||||
def cpu_flags(toolset, variable, architecture, instruction_set, values, default=None):
    """Declare 'values' for 'variable' of 'toolset' when both the given
    <architecture> and <instruction-set> are selected.

    'default' is currently inert: the extra conditions that would make the
    values apply by default are disabled (see the FIXME notes below).
    """
    #FIXME: for some reason this fails. Probably out of date feature code
    ## if default:
    ##     flags(toolset, variable,
    ##           ['<architecture>' + architecture + '/<instruction-set>'],
    ##           values)
    condition = '<architecture>%s/<instruction-set>%s' % (architecture,
                                                          instruction_set)
    #FIXME: the bare '<architecture>/<instruction-set>' + instruction_set
    # condition is disabled for the same reason as above.
    flags(toolset, variable, [condition], values)
|
||||
|
||||
# Set architecture/instruction-set options.
#
# x86 and compatible
flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>32'], ['-m32'])
flags('gcc', 'OPTIONS', ['<architecture>x86/<address-model>64'], ['-m64'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'i386', ['-march=i386'], default=True)
cpu_flags('gcc', 'OPTIONS', 'x86', 'i486', ['-march=i486'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'i586', ['-march=i586'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'i686', ['-march=i686'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium', ['-march=pentium'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-mmx', ['-march=pentium-mmx'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentiumpro', ['-march=pentiumpro'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium2', ['-march=pentium2'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3', ['-march=pentium3'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium3m', ['-march=pentium3m'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium-m', ['-march=pentium-m'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4', ['-march=pentium4'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'pentium4m', ['-march=pentium4m'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'prescott', ['-march=prescott'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'nocona', ['-march=nocona'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'k6', ['-march=k6'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-2', ['-march=k6-2'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'k6-3', ['-march=k6-3'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon', ['-march=athlon'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-tbird', ['-march=athlon-tbird'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-4', ['-march=athlon-4'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-xp', ['-march=athlon-xp'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-mp', ['-march=athlon-mp'])
##
cpu_flags('gcc', 'OPTIONS', 'x86', 'k8', ['-march=k8'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'opteron', ['-march=opteron'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon64', ['-march=athlon64'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'athlon-fx', ['-march=athlon-fx'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip-c6', ['-march=winchip-c6'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'winchip2', ['-march=winchip2'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'c3', ['-march=c3'])
cpu_flags('gcc', 'OPTIONS', 'x86', 'c3-2', ['-march=c3-2'])
# Sparc
flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>32'], ['-m32'])
flags('gcc', 'OPTIONS', ['<architecture>sparc/<address-model>64'], ['-m64'])
# NOTE(review): 'c3' as the sparc default looks odd (c3 is a VIA x86 chip) --
# verify against the Jam original before relying on it.
cpu_flags('gcc', 'OPTIONS', 'sparc', 'c3', ['-mcpu=c3'], default=True)
cpu_flags('gcc', 'OPTIONS', 'sparc', 'v7', ['-mcpu=v7'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'cypress', ['-mcpu=cypress'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'v8', ['-mcpu=v8'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'supersparc', ['-mcpu=supersparc'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite', ['-mcpu=sparclite'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'hypersparc', ['-mcpu=hypersparc'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclite86x', ['-mcpu=sparclite86x'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'f930', ['-mcpu=f930'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'f934', ['-mcpu=f934'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'sparclet', ['-mcpu=sparclet'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'tsc701', ['-mcpu=tsc701'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'v9', ['-mcpu=v9'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc', ['-mcpu=ultrasparc'])
cpu_flags('gcc', 'OPTIONS', 'sparc', 'ultrasparc3', ['-mcpu=ultrasparc3'])
# RS/6000 & PowerPC
flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32'], ['-m32'])
flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64'], ['-m64'])
cpu_flags('gcc', 'OPTIONS', 'power', '403', ['-mcpu=403'])
cpu_flags('gcc', 'OPTIONS', 'power', '505', ['-mcpu=505'])
cpu_flags('gcc', 'OPTIONS', 'power', '601', ['-mcpu=601'])
cpu_flags('gcc', 'OPTIONS', 'power', '602', ['-mcpu=602'])
cpu_flags('gcc', 'OPTIONS', 'power', '603', ['-mcpu=603'])
cpu_flags('gcc', 'OPTIONS', 'power', '603e', ['-mcpu=603e'])
cpu_flags('gcc', 'OPTIONS', 'power', '604', ['-mcpu=604'])
cpu_flags('gcc', 'OPTIONS', 'power', '604e', ['-mcpu=604e'])
cpu_flags('gcc', 'OPTIONS', 'power', '620', ['-mcpu=620'])
cpu_flags('gcc', 'OPTIONS', 'power', '630', ['-mcpu=630'])
cpu_flags('gcc', 'OPTIONS', 'power', '740', ['-mcpu=740'])
cpu_flags('gcc', 'OPTIONS', 'power', '7400', ['-mcpu=7400'])
cpu_flags('gcc', 'OPTIONS', 'power', '7450', ['-mcpu=7450'])
cpu_flags('gcc', 'OPTIONS', 'power', '750', ['-mcpu=750'])
cpu_flags('gcc', 'OPTIONS', 'power', '801', ['-mcpu=801'])
cpu_flags('gcc', 'OPTIONS', 'power', '821', ['-mcpu=821'])
cpu_flags('gcc', 'OPTIONS', 'power', '823', ['-mcpu=823'])
cpu_flags('gcc', 'OPTIONS', 'power', '860', ['-mcpu=860'])
cpu_flags('gcc', 'OPTIONS', 'power', '970', ['-mcpu=970'])
cpu_flags('gcc', 'OPTIONS', 'power', '8540', ['-mcpu=8540'])
cpu_flags('gcc', 'OPTIONS', 'power', 'power', ['-mcpu=power'])
cpu_flags('gcc', 'OPTIONS', 'power', 'power2', ['-mcpu=power2'])
cpu_flags('gcc', 'OPTIONS', 'power', 'power3', ['-mcpu=power3'])
cpu_flags('gcc', 'OPTIONS', 'power', 'power4', ['-mcpu=power4'])
cpu_flags('gcc', 'OPTIONS', 'power', 'power5', ['-mcpu=power5'])
cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc', ['-mcpu=powerpc'])
cpu_flags('gcc', 'OPTIONS', 'power', 'powerpc64', ['-mcpu=powerpc64'])
cpu_flags('gcc', 'OPTIONS', 'power', 'rios', ['-mcpu=rios'])
cpu_flags('gcc', 'OPTIONS', 'power', 'rios1', ['-mcpu=rios1'])
cpu_flags('gcc', 'OPTIONS', 'power', 'rios2', ['-mcpu=rios2'])
cpu_flags('gcc', 'OPTIONS', 'power', 'rsc', ['-mcpu=rsc'])
cpu_flags('gcc', 'OPTIONS', 'power', 'rs64a', ['-mcpu=rs64'])
# AIX variant of RS/6000 & PowerPC
flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>32/<target-os>aix'], ['-maix32'])
flags('gcc', 'OPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-maix64'])
flags('gcc', 'AROPTIONS', ['<architecture>power/<address-model>64/<target-os>aix'], ['-X 64'])
|
||||
55
src/tools/make.py
Normal file
55
src/tools/make.py
Normal file
@@ -0,0 +1,55 @@
|
||||
# Status: being ported by Vladimir Prus
|
||||
|
||||
# Copyright 2003 Dave Abrahams
|
||||
# Copyright 2003 Douglas Gregor
|
||||
# Copyright 2006 Rene Rivera
|
||||
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
|
||||
# Distributed under the Boost Software License, Version 1.0.
|
||||
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
# This module defines the 'make' main target rule.
|
||||
|
||||
from b2.build.targets import BasicTarget
|
||||
from b2.build.virtual_target import Action, FileTarget
|
||||
from b2.build import type
|
||||
from b2.manager import get_manager
|
||||
import b2.build.property_set
|
||||
|
||||
class MakeTarget(BasicTarget):
    """Main target for the 'make' rule: produces its file by running a
    user-supplied bjam action over the source targets."""

    def construct(self, name, source_targets, property_set):
        # The generating rule name is carried in the <action> property.
        rule_name = property_set.get("<action>")[0]
        build_action = Action(get_manager(), source_targets, rule_name,
                              property_set)
        # FIXME: type.type uses global data.
        made_file = FileTarget(self.name(), 1, type.type(self.name()),
                               self.project(), build_action)
        registered = self.project().manager().virtual_targets().register(
            made_file)
        return [b2.build.property_set.empty(), [registered]]
|
||||
|
||||
def make (target_name, sources, generating_rule,
          requirements=None, usage_requirements=None):
    """Declare a 'make' main target.

    target_name -- one-element list with the name of the target file.
    sources -- sources for the target.
    generating_rule -- one-element list naming the bjam rule that builds it.
    requirements / usage_requirements -- optional property lists.
    """
    target_name = target_name[0]
    generating_rule = generating_rule[0]

    # Copy the requirements so the caller's list is not mutated by the
    # <action> property we append below (the original appended in place).
    if requirements:
        requirements = list(requirements)
    else:
        requirements = []
    requirements.append("<action>%s" % generating_rule)

    m = get_manager()
    targets = m.targets()
    project = m.projects().current()
    engine = m.engine()
    # Make the generating rule callable as a bjam action.
    engine.register_bjam_action(generating_rule)

    targets.main_target_alternative(MakeTarget(
        target_name, project,
        targets.main_target_sources(sources, target_name),
        targets.main_target_requirements(requirements, project),
        targets.main_target_default_build([], project),
        targets.main_target_usage_requirements(usage_requirements or [], project)))

get_manager().projects().add_rule("make", make)
|
||||
|
||||
83
src/tools/pch.py
Normal file
83
src/tools/pch.py
Normal file
@@ -0,0 +1,83 @@
|
||||
# Status: Being ported by Steven Watanabe
|
||||
# Base revision: 47077
|
||||
#
|
||||
# Copyright (c) 2005 Reece H. Dunn.
|
||||
# Copyright 2006 Ilya Sokolov
|
||||
# Copyright (c) 2008 Steven Watanabe
|
||||
#
|
||||
# Use, modification and distribution is subject to the Boost Software
|
||||
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
##### Using Precompiled Headers (Quick Guide) #####
|
||||
#
|
||||
# Make precompiled mypch.hpp:
|
||||
#
|
||||
# import pch ;
|
||||
#
|
||||
# cpp-pch mypch
|
||||
# : # sources
|
||||
# mypch.hpp
|
||||
# : # requiremnts
|
||||
# <toolset>msvc:<source>mypch.cpp
|
||||
# ;
|
||||
#
|
||||
# Add cpp-pch to sources:
|
||||
#
|
||||
# exe hello
|
||||
# : main.cpp hello.cpp mypch
|
||||
# ;
|
||||
|
||||
from b2.build import type, feature, generators
|
||||
|
||||
# Abstract precompiled-header type and its C / C++ specializations.
type.register('PCH', ['pch'])
type.register('C_PCH', [], 'PCH')
type.register('CPP_PCH', [], 'PCH')

# Control precompiled header (PCH) generation.
feature.feature('pch',
                ['on', 'off'],
                ['propagated'])

# Free dependency features naming the header to precompile and the
# resulting PCH file.
feature.feature('pch-header', [], ['free', 'dependency'])
feature.feature('pch-file', [], ['free', 'dependency'])
|
||||
|
||||
class PchGenerator(generators.Generator):
    """
    Base PCH generator. The 'run' method has the logic to prevent this generator
    from being run unless it's being used for a top-level PCH target.
    """
    def action_class(self):
        # PCH builds use the ordinary compile action class.
        return 'compile-action'

    def run(self, project, name, prop_set, sources):
        if not name:
            # Unless this generator is invoked as the top-most generator for a
            # main target, fail. This allows using 'H' type as input type for
            # this generator, while preventing Boost.Build to try this generator
            # when not explicitly asked for.
            #
            # One bad example is msvc, where pch generator produces both PCH
            # target and OBJ target, so if there's any header generated (like by
            # bison, or by msidl), we'd try to use pch generator to get OBJ from
            # that H, which is completely wrong. By restricting this generator
            # only to pch main target, such problem is solved.
            pass
        else:
            # Delegate to the toolset-specific run_pch, with the PCH marker
            # define added, and propagate the same define to dependents.
            r = self.run_pch(project, name,
                prop_set.add_raw('<define>BOOST_BUILD_PCH_ENABLED'),
                sources)
            return generators.add_usage_requirements(
                r, ['<define>BOOST_BUILD_PCH_ENABLED'])
        # NOTE(review): returns None implicitly when 'name' is not given;
        # indentation reconstructed from a mangled paste -- confirm the
        # 'return' belongs inside the else branch.

    # This rule must be overridden by the derived classes.
    def run_pch(self, project, name, prop_set, sources):
        pass
|
||||
|
||||
#FIXME: dummy-generator in builtins.jam needs to be ported.
|
||||
# NOTE: requirements are empty, default pch generator can be applied when
|
||||
# pch=off.
|
||||
###generators.register(
|
||||
### [ new dummy-generator pch.default-c-pch-generator : : C_PCH ] ;
|
||||
###generators.register
|
||||
### [ new dummy-generator pch.default-cpp-pch-generator : : CPP_PCH ] ;
|
||||
189
src/tools/rc.py
Normal file
189
src/tools/rc.py
Normal file
@@ -0,0 +1,189 @@
|
||||
# Status: being ported by Steven Watanabe
|
||||
# Base revision: 47077
|
||||
#
|
||||
# Copyright (C) Andre Hentz 2003. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
#
|
||||
# Copyright (c) 2006 Rene Rivera.
|
||||
#
|
||||
# Copyright (c) 2008 Steven Watanabe
|
||||
#
|
||||
# Use, modification and distribution is subject to the Boost Software
|
||||
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
##import type ;
|
||||
##import generators ;
|
||||
##import feature ;
|
||||
##import errors ;
|
||||
##import scanner ;
|
||||
##import toolset : flags ;
|
||||
|
||||
from b2.build import type, toolset, generators, scanner, feature
|
||||
from b2.tools import builtin
|
||||
from b2.util import regex
|
||||
from b2.build.toolset import flags
|
||||
from b2.manager import get_manager
|
||||
|
||||
# Cached result of the --debug-configuration command-line check.
__debug = None

def debug():
    """Return whether --debug-configuration was passed on the command line.

    Computed once and cached in the module-level __debug.
    """
    global __debug
    if __debug is None:
        # NOTE(review): 'bjam' is not imported by this module -- confirm it
        # is injected into the module namespace before relying on this.
        __debug = "--debug-configuration" in bjam.variable("ARGV")
    return __debug
|
||||
|
||||
# Windows resource-script file type.
type.register('RC', ['rc'])

def init():
    # Nothing to initialize eagerly; configuration happens via 'configure'.
    pass
|
||||
|
||||
def configure (command = None, condition = None, options = None):
    """
    Configures a new resource compilation command specific to a condition,
    usually a toolset selection condition. The possible options are:

    * <rc-type>(rc|windres) - Indicates the type of options the command
    accepts.

    Even though the arguments are all optional, only when a command, condition,
    and at minimum the rc-type option are given will the command be configured.
    This is so that callers don't have to check auto-configuration values
    before calling this. And still get the functionality of build failures when
    the resource compiler can't be found.
    """
    rc_type = feature.get_values('<rc-type>', options)
    if rc_type:
        # At most one <rc-type> may be specified.
        assert(len(rc_type) == 1)
        rc_type = rc_type[0]

    # Only configure when everything required is present (see docstring).
    if command and condition and rc_type:
        flags('rc.compile.resource', '.RC', condition, command)
        flags('rc.compile.resource', '.RC_TYPE', condition, rc_type.lower())
        flags('rc.compile.resource', 'DEFINES', [], ['<define>'])
        flags('rc.compile.resource', 'INCLUDES', [], ['<include>'])
        if debug():
            print 'notice: using rc compiler ::', condition, '::', command
|
||||
|
||||
engine = get_manager().engine()
|
||||
|
||||
class RCAction:
    """Callable wrapper representing a bjam action defined from Python.

    Invoking the instance runs the wrapped function, when one was supplied;
    the function itself must register the action to execute.
    """

    def __init__(self, action_name, function):
        self.action_name = action_name
        self.function = function

    def __call__(self, targets, sources, property_set):
        fn = self.function
        # A missing function means the action is declared but has no
        # Python-side behaviour.
        if fn:
            fn(targets, sources, property_set)
|
||||
|
||||
# FIXME: What is the proper way to dispatch actions?
|
||||
# FIXME: What is the proper way to dispatch actions?
def rc_register_action(action_name, function = None):
    """Register 'function' as the Python implementation of bjam action
    'action_name'; raises if the action is already defined."""
    global engine
    if engine.actions.has_key(action_name):
        # NOTE(review): raising a string literal is Python-2-only and
        # deprecated; a proper exception type would be preferable.
        raise "Bjam action %s is already defined" % action_name
    engine.actions[action_name] = RCAction(action_name, function)
|
||||
|
||||
def rc_compile_resource(targets, sources, properties):
    # Dispatch to the rc-type-specific action (rc.compile.resource.rc or
    # rc.compile.resource.windres) chosen when the toolset was configured.
    rc_type = bjam.call('get-target-variable', targets, '.RC_TYPE')
    global engine
    engine.set_update_action('rc.compile.resource.' + rc_type, targets, sources, properties)

rc_register_action('rc.compile.resource', rc_compile_resource)
|
||||
|
||||
|
||||
# Microsoft rc.exe invocation (0x409 = US English language id).
engine.register_action(
    'rc.compile.resource.rc',
    '"$(.RC)" -l 0x409 "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -fo "$(<)" "$(>)"')

# GNU windres invocation.
engine.register_action(
    'rc.compile.resource.windres',
    '"$(.RC)" "-U$(UNDEFS)" "-D$(DEFINES)" -I"$(>:D)" -I"$(<:D)" -I"$(INCLUDES)" -o "$(<)" -i "$(>)"')

# Fallback that assembles /dev/null into an (empty) object file.
# FIXME: this was originally declared quietly
engine.register_action(
    'compile.resource.null',
    'as /dev/null -o "$(<)"')
|
||||
|
||||
# Since it's a common practice to write
|
||||
# exe hello : hello.cpp hello.rc
|
||||
# we change the name of object created from RC file, to
|
||||
# avoid conflict with hello.cpp.
|
||||
# The reason we generate OBJ and not RES, is that gcc does not
|
||||
# seem to like RES files, but works OK with OBJ.
|
||||
# See http://article.gmane.org/gmane.comp.lib.boost.build/5643/
|
||||
#
|
||||
# Using 'register-c-compiler' adds the build directory to INCLUDES
|
||||
# FIXME: switch to generators
|
||||
builtin.register_c_compiler('rc.compile.resource', ['RC'], ['OBJ(%_res)'], [])
|
||||
|
||||
__angle_include_re = "#include[ ]*<([^<]+)>"
|
||||
|
||||
# Register scanner for resources
|
||||
class ResScanner(scanner.Scanner):
    """Scanner for Windows resource (.rc) files: finds #include'd headers
    and referenced resource files (icons, bitmaps, fonts, ...)."""

    def __init__(self, includes):
        # NOTE(review): this statement only evaluates an attribute; it does
        # not invoke the base-class constructor. Presumably it should be
        # scanner.Scanner.__init__(self) -- confirm before changing.
        scanner.__init__ ;
        self.includes = includes

    def pattern(self):
        # Matches either a resource statement (name KIND file) or an
        # #include line (angle or quoted form).
        return "(([^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
               "[ ]+([^ \"]+|\"[^\"]+\"))|(#include[ ]*(<[^<]+>|\"[^\"]+\")))" ;

    def process(self, target, matches, binding):

        # Split the raw matches into angle includes, quoted includes, and
        # referenced resource files.
        angle = regex.transform(matches, "#include[ ]*<([^<]+)>")
        quoted = regex.transform(matches, "#include[ ]*\"([^\"]+)\"")
        res = regex.transform(matches,
            "[^ ]+[ ]+(BITMAP|CURSOR|FONT|ICON|MESSAGETABLE|RT_MANIFEST)" +\
            "[ ]+(([^ \"]+)|\"([^\"]+)\")", [3, 4])

        # Icons and other includes may referenced as
        #
        # IDR_MAINFRAME ICON "res\\icon.ico"
        #
        # so we have to replace double backslashes to single ones.
        # NOTE(review): 're' and 'os' are not imported at module level, and
        # os.path has 'normpath', not 'normalize_path' -- confirm these
        # resolve at runtime.
        res = [ re.sub(r'\\\\', '/', match) for match in res ]

        # CONSIDER: the new scoping rule seem to defeat "on target" variables.
        g = bjam.call('get-target-variable', target, 'HDRGRIST')
        b = os.path.normalize_path(os.path.dirname(binding))

        # Attach binding of including file to included targets.
        # When target is directly created from virtual target
        # this extra information is unnecessary. But in other
        # cases, it allows to distinguish between two headers of the
        # same name included from different places.
        # We don't need this extra information for angle includes,
        # since they should not depend on including file (we can't
        # get literal "." in include path).
        g2 = g + "#" + b

        g = "<" + g + ">"
        g2 = "<" + g2 + ">"
        angle = [g + x for x in angle]
        quoted = [g2 + x for x in quoted]
        res = [g2 + x for x in res]

        all = angle + quoted

        bjam.call('mark-included', target, all)

        engine = get_manager().engine()

        engine.add_dependency(target, res)
        # Do not complain if any of the included files are missing.
        bjam.call('NOCARE', all + res)
        # NOTE(review): 'ungrist' is not defined or imported in this
        # module -- confirm where it comes from.
        engine.set_target_variable(angle, 'SEARCH', ungrist(self.includes))
        engine.set_target_variable(quoted, 'SEARCH', b + ungrist(self.includes))
        engine.set_target_variable(res, 'SEARCH', b + ungrist(self.includes)) ;

        # Just propagate current scanner to includes, in a hope
        # that includes do not change scanners.
        get_manager().scanners().propagate(self, angle + quoted)

# Register the scanner and attach it to the RC type.
scanner.register(ResScanner, 'include')
type.set_scanner('RC', ResScanner)
|
||||
18
src/tools/types/__init__.py
Normal file
18
src/tools/types/__init__.py
Normal file
@@ -0,0 +1,18 @@
|
||||
__all__ = [
    'asm',
    'cpp',
    'exe',
    'html',
    'lib',
    'obj',
    'rsp',
]

def register_all ():
    """Import every type module listed in __all__.

    Importing alone is sufficient: each type module calls its own
    register() at import time, so no explicit call is needed here
    (the original kept a dead 'i.register ()' string and a commented-out
    exec; both removed).
    """
    for name in __all__:
        __import__ (__name__ + '.' + name)

# TODO: (PF) I thought these would be imported automatically. Anyone knows why they aren't?
register_all ()
|
||||
13
src/tools/types/asm.py
Normal file
13
src/tools/types/asm.py
Normal file
@@ -0,0 +1,13 @@
|
||||
# Copyright Craig Rodrigues 2005.
|
||||
# Copyright (c) 2008 Steven Watanabe
|
||||
#
|
||||
# Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
from b2.build import type
|
||||
|
||||
def register():
    """Register the assembler source type and its file extensions."""
    type.register_type('ASM', ['s', 'S', 'asm'])

register()
|
||||
10
src/tools/types/cpp.py
Normal file
10
src/tools/types/cpp.py
Normal file
@@ -0,0 +1,10 @@
|
||||
# Copyright David Abrahams 2004. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
from b2.build import type
|
||||
|
||||
def register ():
    """Register the C++ source type and its common extensions."""
    type.register_type ('CPP', ['cpp', 'cxx', 'cc'])

register ()
|
||||
11
src/tools/types/exe.py
Normal file
11
src/tools/types/exe.py
Normal file
@@ -0,0 +1,11 @@
|
||||
# Copyright David Abrahams 2004. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
from b2.build import type
|
||||
|
||||
def register ():
    """Register the executable type: '.exe' suffix on Windows/Cygwin,
    no suffix on other platforms."""
    type.register_type ('EXE', ['exe'], None, ['NT', 'CYGWIN'])
    type.register_type ('EXE', [], None, [])

register ()
|
||||
10
src/tools/types/html.py
Normal file
10
src/tools/types/html.py
Normal file
@@ -0,0 +1,10 @@
|
||||
# Copyright David Abrahams 2004. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
from b2.build import type
|
||||
|
||||
def register ():
    """Register the HTML document type."""
    type.register_type ('HTML', ['html'])

register ()

|
||||
23
src/tools/types/lib.py
Normal file
23
src/tools/types/lib.py
Normal file
@@ -0,0 +1,23 @@
|
||||
# Copyright David Abrahams 2004. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
from b2.build import type
|
||||
|
||||
def register ():
    """Register the library type hierarchy: abstract LIB plus the derived
    static/import/shared/searched library types with per-OS suffixes."""

    if not type.registered ('LIB'):
        type.register ('LIB')

    # '.lib' (and '.a') on Windows/Cygwin, '.a' elsewhere.
    type.register_type ('STATIC_LIB', ['lib', 'a'], 'LIB', ['NT', 'CYGWIN'])
    type.register_type ('STATIC_LIB', ['a'], 'LIB')

    # Import libraries accompany Windows shared libraries.
    type.register_type ('IMPORT_LIB', [], 'STATIC_LIB')
    type.set_generated_target_suffix ('IMPORT_LIB', [], 'lib')

    # '.dll' on Windows/Cygwin, '.so' elsewhere.
    type.register_type ('SHARED_LIB', ['dll'], 'LIB', ['NT', 'CYGWIN'])
    type.register_type ('SHARED_LIB', ['so'], 'LIB')

    # Libraries located by the linker via search paths (-l).
    type.register_type ('SEARCHED_LIB', [], 'LIB')

register ()
|
||||
11
src/tools/types/obj.py
Normal file
11
src/tools/types/obj.py
Normal file
@@ -0,0 +1,11 @@
|
||||
# Copyright David Abrahams 2004. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
from b2.build import type
|
||||
|
||||
def register ():
    """Register the object-file type: '.obj' on Windows/Cygwin, '.o'
    elsewhere."""
    type.register_type ('OBJ', ['obj'], None, ['NT', 'CYGWIN'])
    type.register_type ('OBJ', ['o'])

register ()
|
||||
10
src/tools/types/rsp.py
Normal file
10
src/tools/types/rsp.py
Normal file
@@ -0,0 +1,10 @@
|
||||
# Copyright David Abrahams 2004. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
from b2.build import type
|
||||
|
||||
def register ():
    """Register the response-file type."""
    type.register_type ('RSP', ['rsp'])

register ()
|
||||
150
src/tools/unix.py
Normal file
150
src/tools/unix.py
Normal file
@@ -0,0 +1,150 @@
|
||||
# Copyright (c) 2004 Vladimir Prus.
|
||||
#
|
||||
# Use, modification and distribution is subject to the Boost Software
|
||||
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
|
||||
# http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
""" This file implements linking semantics common to all unixes. On unix, static
|
||||
libraries must be specified in a fixed order on the linker command line. Generators
|
||||
declared there store information about the order and use it properly.
|
||||
"""
|
||||
|
||||
import builtin
|
||||
from b2.build import generators, type
|
||||
from b2.util.utility import *
|
||||
from b2.util import set, sequence
|
||||
|
||||
class UnixLinkingGenerator (builtin.LinkingGenerator):
    """Linking generator that also records and applies library order
    constraints -- unix linkers are sensitive to the order of static
    libraries on the command line."""

    def __init__ (self, id, composing, source_types, target_types, requirements):
        builtin.LinkingGenerator.__init__ (self, id, composing, source_types, target_types, requirements)

    def run (self, project, name, prop_set, sources):
        # Run the ordinary linking generator, then remember the ordering
        # constraints between created and consumed libraries.
        result = builtin.LinkingGenerator.run (self, project, name, prop_set, sources)
        if result:
            set_library_order (project.manager (), sources, prop_set, result [1])

        return result

    def generated_targets (self, sources, prop_set, project, name):
        # Separate library sources from the rest and append the libraries
        # back in the globally recorded order.
        sources2 = []
        libraries = []
        for l in sources:
            if type.is_derived (l.type (), 'LIB'):
                libraries.append (l)

            else:
                sources2.append (l)

        sources = sources2 + order_libraries (libraries)

        return builtin.LinkingGenerator.generated_targets (self, sources, prop_set, project, name)
|
||||
|
||||
|
||||
class UnixArchiveGenerator (builtin.ArchiveGenerator):
    """Archive (static library) generator that records library order
    constraints for the archives it creates."""
    def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
        builtin.ArchiveGenerator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)

    def run (self, project, name, prop_set, sources):
        result = builtin.ArchiveGenerator.run(self, project, name, prop_set, sources)
        # NOTE(review): the sibling UnixLinkingGenerator passes result[1]
        # here -- confirm whether 'result' or 'result[1]' is intended.
        set_library_order(project.manager(), sources, prop_set, result)
        return result
|
||||
|
||||
class UnixSearchedLibGenerator (builtin.SearchedLibGenerator):
    """Searched-library generator that records ordering constraints for
    the libraries it finds."""

    def __init__ (self):
        builtin.SearchedLibGenerator.__init__ (self)

    def optional_properties (self):
        # All of this generator's requirements are optional.
        return self.requirements ()

    def run (self, project, name, prop_set, sources, multiple):
        # Bug fix: the original called SearchedLibGenerator.run without the
        # 'builtin.' prefix (NameError) and without 'self', and called
        # set_library_order without the manager argument it requires.
        # TODO(review): confirm the base run() signature accepts 'multiple'.
        result = builtin.SearchedLibGenerator.run (self, project, name,
                                                   prop_set, sources, multiple)

        set_library_order (project.manager (), sources, prop_set, result)

        return result
|
||||
|
||||
class UnixPrebuiltLibGenerator (generators.Generator):
    """Generator for prebuilt libraries identified by the <file> property."""
    def __init__ (self, id, composing, source_types, target_types_and_names, requirements):
        generators.Generator.__init__ (self, id, composing, source_types, target_types_and_names, requirements)

    def run (self, project, name, prop_set, sources, multiple):
        # The library file comes from the <file> property; record that it
        # must precede the libraries in 'sources'.
        f = prop_set.get ('<file>')
        set_library_order_aux (f, sources)
        return (f, sources)
|
||||
|
||||
### # The derived toolset must specify their own rules and actions.
|
||||
# FIXME: restore?
|
||||
# action.register ('unix.prebuilt', None, None)
|
||||
|
||||
|
||||
generators.register (UnixPrebuiltLibGenerator ('unix.prebuilt', False, [], ['LIB'], ['<file>', '<toolset>unix']))
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
### # Declare generators
|
||||
### generators.register [ new UnixLinkingGenerator unix.link : LIB OBJ : EXE
|
||||
### : <toolset>unix ] ;
|
||||
generators.register (UnixArchiveGenerator ('unix.archive', True, ['OBJ'], ['STATIC_LIB'], ['<toolset>unix']))
|
||||
|
||||
### generators.register [ new UnixLinkingGenerator unix.link.dll : LIB OBJ : SHARED_LIB
|
||||
### : <toolset>unix ] ;
|
||||
###
|
||||
### generators.register [ new UnixSearchedLibGenerator
|
||||
### unix.SearchedLibGenerator : : SEARCHED_LIB : <toolset>unix ] ;
|
||||
###
|
||||
###
|
||||
### # The derived toolset must specify their own actions.
|
||||
### actions link {
|
||||
### }
|
||||
###
|
||||
### actions link.dll {
|
||||
### }
|
||||
|
||||
def unix_archive (manager, targets, sources, properties):
    # Placeholder: derived toolsets supply the real archive action.
    pass
|
||||
|
||||
# FIXME: restore?
|
||||
#action.register ('unix.archive', unix_archive, [''])
|
||||
|
||||
### actions searched-lib-generator {
|
||||
### }
|
||||
###
|
||||
### actions prebuilt {
|
||||
### }
|
||||
|
||||
|
||||
from b2.util.order import Order
|
||||
__order = Order ()
|
||||
|
||||
def set_library_order_aux (from_libs, to_libs):
    """Record that every library in 'from_libs' must precede every distinct
    library in 'to_libs' on the linker command line."""
    for predecessor in from_libs:
        for follower in to_libs:
            if predecessor != follower:
                __order.add_pair (predecessor, follower)
|
||||
|
||||
def set_library_order (manager, sources, prop_set, result):
    """Record ordering constraints: every library newly created in 'result'
    must precede the libraries consumed from 'sources' (plus the libraries
    pulled in via dependency properties).

    Note: intentionally mutates the caller's 'sources' list by appending the
    dependency-property objects, matching the original behaviour.
    """
    used_libraries = []
    deps = prop_set.dependency ()

    # Idiom fix: plain loop instead of a side-effecting list comprehension.
    for x in deps:
        sources.append (manager.get_object (get_value (x)))
    sources = sequence.unique (sources)

    for l in sources:
        if l.type () and type.is_derived (l.type (), 'LIB'):
            used_libraries.append (l)

    created_libraries = []
    for l in result:
        if l.type () and type.is_derived (l.type (), 'LIB'):
            created_libraries.append (l)

    # Only constrain against libraries we did not create ourselves.
    created_libraries = set.difference (created_libraries, used_libraries)
    set_library_order_aux (created_libraries, used_libraries)
|
||||
|
||||
def order_libraries (libraries):
    """Return 'libraries' reordered to satisfy the recorded constraints."""
    return __order.order (libraries)
|
||||
|
||||
0
src/util/__init__.py
Normal file
0
src/util/__init__.py
Normal file
46
src/util/logger.py
Normal file
46
src/util/logger.py
Normal file
@@ -0,0 +1,46 @@
|
||||
# Copyright Pedro Ferreira 2005. Distributed under the Boost
|
||||
# Software License, Version 1.0. (See accompanying
|
||||
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
import sys
|
||||
|
||||
class NullLogger:
    """Base logger: all output is suppressed. Subclasses enable output by
    overriding on(), interesting() and do_log()."""

    def __init__ (self):
        # Current indentation prefix; grown and shrunk in 4-space steps.
        self.indent_ = ''

    def log (self, source_name, *args):
        """Emit the indent, each argument, and a newline -- but only when
        logging is on and 'source_name' is interesting."""
        if self.on () and self.interesting (source_name):
            self.do_log (self.indent_)
            for i in args:
                self.do_log (i)
            self.do_log ('\n')

    def increase_indent (self):
        # One indent level == four spaces.
        if self.on ():
            self.indent_ += '    '

    def decrease_indent (self):
        # Bug fix: drop the last indent level. The original kept only the
        # LAST four characters (indent_[-4:]) and, with the '>' comparison,
        # could never return to zero indentation.
        if self.on () and len (self.indent_) >= 4:
            self.indent_ = self.indent_ [:-4]

    def do_log (self, *args):
        # No-op in the null logger.
        pass

    def interesting (self, source_name):
        # The null logger is interested in nothing.
        return False

    def on (self):
        # Logging is disabled entirely.
        return False
|
||||
|
||||
class TextLogger (NullLogger):
    """Logger that writes every message to standard output."""

    def __init__ (self):
        NullLogger.__init__ (self)

    def do_log (self, message):
        sys.stdout.write (str (message))

    def interesting (self, source_name):
        # Every source is reported.
        return True

    def on (self):
        # Output is always enabled.
        return True
|
||||
121
src/util/order.py
Normal file
121
src/util/order.py
Normal file
@@ -0,0 +1,121 @@
|
||||
# Copyright (C) 2003 Vladimir Prus
|
||||
# Use, modification, and distribution is subject to the Boost Software
|
||||
# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
|
||||
# at http://www.boost.org/LICENSE_1_0.txt)
|
||||
|
||||
class Order:
    """Allows ordering arbitrary objects with regard to arbitrary binary relation.

    The primary use case is the gcc toolset, which is sensitive to
    library order: if library 'a' uses symbols from library 'b',
    then 'a' must be present before 'b' on the linker's command line.

    This requirement can be lifted for gcc with GNU ld, but for gcc with
    Solaris LD (and for Solaris toolset as well), the order always matters.

    So, we need to store order requirements and then order libraries
    according to them. It it not possible to use dependency graph as
    order requirements. What we need is "use symbols" relationship
    while dependency graph provides "needs to be updated" relationship.

    For example::
        lib a : a.cpp b;
        lib b ;

    For static linking, the 'a' library need not depend on 'b'. However, it
    still should come before 'b' on the command line.
    """

    def __init__ (self):
        # List of (first, second) pairs: 'first' must precede 'second'.
        self.constraints_ = []

    def add_pair (self, first, second):
        """ Adds the constraint that 'first' should precede 'second'.
        """
        self.constraints_.append ((first, second))

    def order (self, objects):
        """ Given a list of objects, reorder them so that the constraints
        specified by 'add_pair' are satisfied.

        Raises when the constraints are circular.

        The algorithm was adopted from an awk script by Nikita Youshchenko
        (yoush at cs dot msu dot su)
        """
        # The algorithm used is the same is standard transitive closure,
        # except that we're not keeping in-degree for all vertices, but
        # rather removing edges.
        result = []

        if not objects:
            return result

        constraints = self.__eliminate_unused_constraits (objects)

        while objects:
            # Find some object that nothing else must precede and emit it
            # next. Bug fix: use an explicit 'found' flag instead of
            # truth-testing the object itself, so falsy objects (0, '')
            # can be ordered too.
            found = False
            for position, obj in enumerate (objects):
                if self.__has_no_dependents (obj, constraints):
                    found = True
                    break

            if not found:
                # NOTE: kept as BaseException for compatibility with the
                # original; a ValueError would be more conventional.
                raise BaseException ("Circular order dependencies")

            # No problem with placing first.
            result.append (obj)

            # Constraints where 'obj' comes first are now satisfied.
            constraints = self.__remove_satisfied (constraints, obj)

            # Process the remaining objects on the next iteration.
            objects = objects [:position] + objects [position + 1:]

        return result

    def __eliminate_unused_constraits (self, objects):
        """ Eliminate constraints which mention objects not in 'objects'.
        In graph-theory terms, this is finding subgraph induced by
        ordered vertices.
        """
        result = []
        for c in self.constraints_:
            if c [0] in objects and c [1] in objects:
                result.append (c)

        return result

    def __has_no_dependents (self, obj, constraints):
        """ Returns true if there's no constraint in 'constraints' where
        'obj' comes second.
        """
        for c in constraints:
            if c [1] == obj:
                return False
        return True

    def __remove_satisfied (self, constraints, obj):
        # Keep only constraints that do not start with 'obj'.
        result = []
        for c in constraints:
            if c [0] != obj:
                result.append (c)

        return result
|
||||
922
src/util/path.py
Normal file
922
src/util/path.py
Normal file
@@ -0,0 +1,922 @@
|
||||
# Status: this module is ported on demand by however needs something
|
||||
# from it. Functionality that is not needed by Python port will
|
||||
# be dropped.
|
||||
|
||||
# Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
# Performs various path manipulations. Path are always in a 'normilized'
|
||||
# representation. In it, a path may be either:
|
||||
#
|
||||
# - '.', or
|
||||
#
|
||||
# - ['/'] [ ( '..' '/' )* (token '/')* token ]
|
||||
#
|
||||
# In plain english, path can be rooted, '..' elements are allowed only
|
||||
# at the beginning, and it never ends in slash, except for path consisting
|
||||
# of slash only.
|
||||
|
||||
import os.path
|
||||
from utility import to_seq
|
||||
from glob import glob as builtin_glob
|
||||
|
||||
def root (path, root):
    """ If 'path' is relative, it is rooted at 'root'. Otherwise, it's unchanged.
    """
    # Guard-clause form: only relative paths need rooting.
    if not os.path.isabs (path):
        return os.path.join (root, path)
    return path
|
||||
|
||||
def make (native):
    """ Converts the native path into normalized form.
    """
    # TODO: make os selection here.
    # Currently always applies the UNIX normalization rules.
    return make_UNIX (native)
|
||||
|
||||
def make_UNIX (native):
    """Normalize a non-empty native UNIX path via os.path.normpath."""

    # VP: I have no idea now 'native' can be empty here! But it can!
    assert (native)

    return os.path.normpath (native)
|
||||
|
||||
def native (path):
    """ Builds a native representation of the path.
    """
    # TODO: make os selection here.
    # Currently assumes a UNIX-style host.
    return native_UNIX (path)
|
||||
|
||||
def native_UNIX (path):
    # Normalized and native forms coincide on UNIX.
    return path
|
||||
|
||||
|
||||
def pwd ():
    """ Returns the current working directory.
    # TODO: is it a good idea to use the current dir? Some use-cases
    may not allow us to depend on the current dir.
    """
    # Normalize before returning, like every other path produced here.
    return make (os.getcwd ())
|
||||
|
||||
def is_rooted(path):
    """Tests if a path is rooted, i.e. begins with a slash.

    Returns a falsy value for an empty path (the path itself, matching
    the original short-circuit behaviour) and a bool otherwise.
    """
    if not path:
        return path
    return path[0] == '/'
|
||||
|
||||
|
||||
###################################################################
|
||||
# Still to port.
|
||||
# Original lines are prefixed with "# "
|
||||
#
|
||||
# # Copyright (C) Vladimir Prus 2002. Permission to copy, use, modify, sell and
|
||||
# # distribute this software is granted provided this copyright notice appears in
|
||||
# # all copies. This software is provided "as is" without express or implied
|
||||
# # warranty, and with no claim as to its suitability for any purpose.
|
||||
#
|
||||
# # Performs various path manipulations. Path are always in a 'normilized'
|
||||
# # representation. In it, a path may be either:
|
||||
# #
|
||||
# # - '.', or
|
||||
# #
|
||||
# # - ['/'] [ ( '..' '/' )* (token '/')* token ]
|
||||
# #
|
||||
# # In plain english, path can be rooted, '..' elements are allowed only
|
||||
# # at the beginning, and it never ends in slash, except for path consisting
|
||||
# # of slash only.
|
||||
#
|
||||
# import modules ;
|
||||
# import sequence ;
|
||||
# import regex ;
|
||||
# import errors : error ;
|
||||
#
|
||||
#
|
||||
# os = [ modules.peek : OS ] ;
|
||||
# if [ modules.peek : UNIX ]
|
||||
# {
|
||||
# local uname = [ modules.peek : JAMUNAME ] ;
|
||||
# switch $(uname)
|
||||
# {
|
||||
# case CYGWIN* :
|
||||
# os = CYGWIN ;
|
||||
#
|
||||
# case * :
|
||||
# os = UNIX ;
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Tests if a path is rooted.
|
||||
# #
|
||||
# rule is-rooted ( path )
|
||||
# {
|
||||
# return [ MATCH "^(/)" : $(path) ] ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Tests if a path has a parent.
|
||||
# #
|
||||
# rule has-parent ( path )
|
||||
# {
|
||||
# if $(path) != / {
|
||||
# return 1 ;
|
||||
# } else {
|
||||
# return ;
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Returns the path without any directory components.
|
||||
# #
|
||||
# rule basename ( path )
|
||||
# {
|
||||
# return [ MATCH "([^/]+)$" : $(path) ] ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Returns parent directory of the path. If no parent exists, error is issued.
|
||||
# #
|
||||
# rule parent ( path )
|
||||
# {
|
||||
# if [ has-parent $(path) ] {
|
||||
#
|
||||
# if $(path) = . {
|
||||
# return .. ;
|
||||
# } else {
|
||||
#
|
||||
# # Strip everything at the end of path up to and including
|
||||
# # the last slash
|
||||
# local result = [ regex.match "((.*)/)?([^/]+)" : $(path) : 2 3 ] ;
|
||||
#
|
||||
# # Did we strip what we shouldn't?
|
||||
# if $(result[2]) = ".." {
|
||||
# return $(path)/.. ;
|
||||
# } else {
|
||||
# if ! $(result[1]) {
|
||||
# if [ is-rooted $(path) ] {
|
||||
# result = / ;
|
||||
# } else {
|
||||
# result = . ;
|
||||
# }
|
||||
# }
|
||||
# return $(result[1]) ;
|
||||
# }
|
||||
# }
|
||||
# } else {
|
||||
# error "Path '$(path)' has no parent" ;
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Returns path2 such that "[ join path path2 ] = .".
|
||||
# # The path may not contain ".." element or be rooted.
|
||||
# #
|
||||
# rule reverse ( path )
|
||||
# {
|
||||
# if $(path) = .
|
||||
# {
|
||||
# return $(path) ;
|
||||
# }
|
||||
# else
|
||||
# {
|
||||
# local tokens = [ regex.split $(path) "/" ] ;
|
||||
# local tokens2 ;
|
||||
# for local i in $(tokens) {
|
||||
# tokens2 += .. ;
|
||||
# }
|
||||
# return [ sequence.join $(tokens2) : "/" ] ;
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Auxillary rule: does all the semantic of 'join', except for error cheching.
|
||||
# # The error checking is separated because this rule is recursive, and I don't
|
||||
# # like the idea of checking the same input over and over.
|
||||
# #
|
||||
# local rule join-imp ( elements + )
|
||||
# {
|
||||
# return [ NORMALIZE_PATH $(elements:J="/") ] ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Contanenates the passed path elements. Generates an error if
|
||||
# # any element other than the first one is rooted.
|
||||
# #
|
||||
# rule join ( elements + )
|
||||
# {
|
||||
# if ! $(elements[2])
|
||||
# {
|
||||
# return $(elements[1]) ;
|
||||
# }
|
||||
# else
|
||||
# {
|
||||
# for local e in $(elements[2-])
|
||||
# {
|
||||
# if [ is-rooted $(e) ]
|
||||
# {
|
||||
# error only first element may be rooted ;
|
||||
# }
|
||||
# }
|
||||
# return [ join-imp $(elements) ] ;
|
||||
# }
|
||||
# }
|
||||
|
||||
|
||||
def glob(dirs, patterns):
    """Returns the list of files matching any of the given patterns in
    the specified directories.

    Both 'dirs' and 'patterns' may be a single string or a sequence of
    strings; a directory entry may itself contain several paths joined
    with os.pathsep.  Each slash-separated element of a pattern can
    contain the special characters '?' (matches any single character)
    and '*' (matches an arbitrary number of characters).

    NOTE(review): a later definition of glob() in this module shadows
    this one at import time; kept for reference until the port settles.
    """
    dirs = to_seq(dirs)
    patterns = to_seq(patterns)

    # Expand os.pathsep-separated directory lists into individual dirs.
    split_dirs = []
    for dir in dirs:
        split_dirs += dir.split(os.pathsep)

    # Use the glob module imported once at file scope (builtin_glob)
    # instead of re-importing the module on every inner-loop iteration
    # as the original code did.
    result = []
    for dir in split_dirs:
        for pattern in patterns:
            result.extend(builtin_glob(os.path.join(dir, pattern)))
    return result
|
||||
|
||||
# #
|
||||
# # Returns true is the specified file exists.
|
||||
# #
|
||||
# rule exists ( file )
|
||||
# {
|
||||
# return [ path.glob $(file:D) : $(file:D=) ] ;
|
||||
# }
|
||||
# NATIVE_RULE path : exists ;
|
||||
#
|
||||
#
|
||||
#
|
||||
# #
|
||||
# # Find out the absolute name of path and returns the list of all the parents,
|
||||
# # starting with the immediate one. Parents are returned as relative names.
|
||||
# # If 'upper_limit' is specified, directories above it will be pruned.
|
||||
# #
|
||||
# rule all-parents ( path : upper_limit ? : cwd ? )
|
||||
# {
|
||||
# cwd ?= [ pwd ] ;
|
||||
# local path_ele = [ regex.split [ root $(path) $(cwd) ] "/" ] ;
|
||||
#
|
||||
# if ! $(upper_limit) {
|
||||
# upper_limit = / ;
|
||||
# }
|
||||
# local upper_ele = [ regex.split [ root $(upper_limit) $(cwd) ] "/" ] ;
|
||||
#
|
||||
# # Leave only elements in 'path_ele' below 'upper_ele'
|
||||
# while $(path_ele) && $(upper_ele[1]) = $(path_ele[1]) {
|
||||
# upper_ele = $(upper_ele[2-]) ;
|
||||
# path_ele = $(path_ele[2-]) ;
|
||||
# }
|
||||
#
|
||||
# # All upper elements removed ?
|
||||
# if ! $(upper_ele) {
|
||||
# # Create the relative paths to parents, number of elements in 'path_ele'
|
||||
# local result ;
|
||||
# for local i in $(path_ele) {
|
||||
# path = [ parent $(path) ] ;
|
||||
# result += $(path) ;
|
||||
# }
|
||||
# return $(result) ;
|
||||
# }
|
||||
# else {
|
||||
# error "$(upper_limit) is not prefix of $(path)" ;
|
||||
# }
|
||||
# }
|
||||
#
|
||||
#
|
||||
# #
|
||||
# # Search for 'pattern' in parent directories of 'dir', up till and including
|
||||
# # 'upper_limit', if it is specified, or till the filesystem root otherwise.
|
||||
# #
|
||||
# rule glob-in-parents ( dir : patterns + : upper-limit ? )
|
||||
# {
|
||||
# local result ;
|
||||
# local parent-dirs = [ all-parents $(dir) : $(upper-limit) ] ;
|
||||
#
|
||||
# while $(parent-dirs) && ! $(result)
|
||||
# {
|
||||
# result = [ glob $(parent-dirs[1]) : $(patterns) ] ;
|
||||
# parent-dirs = $(parent-dirs[2-]) ;
|
||||
# }
|
||||
# return $(result) ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Assuming 'child' is a subdirectory of 'parent', return the relative
|
||||
# # path from 'parent' to 'child'
|
||||
# #
|
||||
# rule relative ( child parent )
|
||||
# {
|
||||
# if $(parent) = "."
|
||||
# {
|
||||
# return $(child) ;
|
||||
# }
|
||||
# else
|
||||
# {
|
||||
# local split1 = [ regex.split $(parent) / ] ;
|
||||
# local split2 = [ regex.split $(child) / ] ;
|
||||
#
|
||||
# while $(split1)
|
||||
# {
|
||||
# if $(split1[1]) = $(split2[1])
|
||||
# {
|
||||
# split1 = $(split1[2-]) ;
|
||||
# split2 = $(split2[2-]) ;
|
||||
# }
|
||||
# else
|
||||
# {
|
||||
# errors.error $(child) is not a subdir of $(parent) ;
|
||||
# }
|
||||
# }
|
||||
# return [ join $(split2) ] ;
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# # Returns the minimal path to path2 that is relative path1.
|
||||
# #
|
||||
# rule relative-to ( path1 path2 )
|
||||
# {
|
||||
# local root_1 = [ regex.split [ reverse $(path1) ] / ] ;
|
||||
# local split1 = [ regex.split $(path1) / ] ;
|
||||
# local split2 = [ regex.split $(path2) / ] ;
|
||||
#
|
||||
# while $(split1) && $(root_1)
|
||||
# {
|
||||
# if $(split1[1]) = $(split2[1])
|
||||
# {
|
||||
# root_1 = $(root_1[2-]) ;
|
||||
# split1 = $(split1[2-]) ;
|
||||
# split2 = $(split2[2-]) ;
|
||||
# }
|
||||
# else
|
||||
# {
|
||||
# split1 = ;
|
||||
# }
|
||||
# }
|
||||
# return [ join . $(root_1) $(split2) ] ;
|
||||
# }
|
||||
|
||||
def programs_path():
    """Returns the list of paths which are used by the operating system
    for looking up programs, in normalized form.

    Checks the 'PATH', 'Path' and 'path' environment variables, in that
    order, skipping any that are unset or empty.
    """
    result = []
    for name in ('PATH', 'Path', 'path'):
        value = os.environ.get(name, '')
        if value:
            for element in value.split(os.path.pathsep):
                result.append(make(element))
    return result
|
||||
|
||||
# rule make-NT ( native )
|
||||
# {
|
||||
# local tokens = [ regex.split $(native) "[/\\]" ] ;
|
||||
# local result ;
|
||||
#
|
||||
# # Handle paths ending with slashes
|
||||
# if $(tokens[-1]) = ""
|
||||
# {
|
||||
# tokens = $(tokens[1--2]) ; # discard the empty element
|
||||
# }
|
||||
#
|
||||
# result = [ path.join $(tokens) ] ;
|
||||
#
|
||||
# if [ regex.match "(^.:)" : $(native) ]
|
||||
# {
|
||||
# result = /$(result) ;
|
||||
# }
|
||||
#
|
||||
# if $(native) = ""
|
||||
# {
|
||||
# result = "." ;
|
||||
# }
|
||||
#
|
||||
# return $(result) ;
|
||||
# }
|
||||
#
|
||||
# rule native-NT ( path )
|
||||
# {
|
||||
# local result = [ MATCH "^/?(.*)" : $(path) ] ;
|
||||
# result = [ sequence.join [ regex.split $(result) "/" ] : "\\" ] ;
|
||||
# return $(result) ;
|
||||
# }
|
||||
#
|
||||
# rule make-CYGWIN ( path )
|
||||
# {
|
||||
# return [ make-NT $(path) ] ;
|
||||
# }
|
||||
#
|
||||
# rule native-CYGWIN ( path )
|
||||
# {
|
||||
# local result = $(path) ;
|
||||
# if [ regex.match "(^/.:)" : $(path) ] # win absolute
|
||||
# {
|
||||
# result = [ MATCH "^/?(.*)" : $(path) ] ; # remove leading '/'
|
||||
# }
|
||||
# return [ native-UNIX $(result) ] ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # split-VMS: splits input native path into
|
||||
# # device dir file (each part is optional),
|
||||
# # example:
|
||||
# #
|
||||
# # dev:[dir]file.c => dev: [dir] file.c
|
||||
# #
|
||||
# rule split-path-VMS ( native )
|
||||
# {
|
||||
# local matches = [ MATCH ([a-zA-Z0-9_-]+:)?(\\[[^\]]*\\])?(.*)?$ : $(native) ] ;
|
||||
# local device = $(matches[1]) ;
|
||||
# local dir = $(matches[2]) ;
|
||||
# local file = $(matches[3]) ;
|
||||
#
|
||||
# return $(device) $(dir) $(file) ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Converts a native VMS path into a portable path spec.
|
||||
# #
|
||||
# # Does not handle current-device absolute paths such
|
||||
# # as "[dir]File.c" as it is not clear how to represent
|
||||
# # them in the portable path notation.
|
||||
# #
|
||||
# # Adds a trailing dot (".") to the file part if no extension
|
||||
# # is present (helps when converting it back into native path).
|
||||
# #
|
||||
# rule make-VMS ( native )
|
||||
# {
|
||||
# if [ MATCH ^(\\[[a-zA-Z0-9]) : $(native) ]
|
||||
# {
|
||||
# errors.error "Can't handle default-device absolute paths: " $(native) ;
|
||||
# }
|
||||
#
|
||||
# local parts = [ split-path-VMS $(native) ] ;
|
||||
# local device = $(parts[1]) ;
|
||||
# local dir = $(parts[2]) ;
|
||||
# local file = $(parts[3]) ;
|
||||
# local elems ;
|
||||
#
|
||||
# if $(device)
|
||||
# {
|
||||
# #
|
||||
# # rooted
|
||||
# #
|
||||
# elems = /$(device) ;
|
||||
# }
|
||||
#
|
||||
# if $(dir) = "[]"
|
||||
# {
|
||||
# #
|
||||
# # Special case: current directory
|
||||
# #
|
||||
# elems = $(elems) "." ;
|
||||
# }
|
||||
# else if $(dir)
|
||||
# {
|
||||
# dir = [ regex.replace $(dir) "\\[|\\]" "" ] ;
|
||||
# local dir_parts = [ regex.split $(dir) \\. ] ;
|
||||
#
|
||||
# if $(dir_parts[1]) = ""
|
||||
# {
|
||||
# #
|
||||
# # Relative path
|
||||
# #
|
||||
# dir_parts = $(dir_parts[2--1]) ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # replace "parent-directory" parts (- => ..)
|
||||
# #
|
||||
# dir_parts = [ regex.replace-list $(dir_parts) : - : .. ] ;
|
||||
#
|
||||
# elems = $(elems) $(dir_parts) ;
|
||||
# }
|
||||
#
|
||||
# if $(file)
|
||||
# {
|
||||
# if ! [ MATCH (\\.) : $(file) ]
|
||||
# {
|
||||
# #
|
||||
# # Always add "." to end of non-extension file
|
||||
# #
|
||||
# file = $(file). ;
|
||||
# }
|
||||
# elems = $(elems) $(file) ;
|
||||
# }
|
||||
#
|
||||
# local portable = [ path.join $(elems) ] ;
|
||||
#
|
||||
# return $(portable) ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Converts a portable path spec into a native VMS path.
|
||||
# #
|
||||
# # Relies on having at least one dot (".") included in the file
|
||||
# # name to be able to differentiate it ftom the directory part.
|
||||
# #
|
||||
# rule native-VMS ( path )
|
||||
# {
|
||||
# local device = "" ;
|
||||
# local dir = $(path) ;
|
||||
# local file = "" ;
|
||||
# local native ;
|
||||
# local split ;
|
||||
#
|
||||
# #
|
||||
# # Has device ?
|
||||
# #
|
||||
# if [ is-rooted $(dir) ]
|
||||
# {
|
||||
# split = [ MATCH ^/([^:]+:)/?(.*) : $(dir) ] ;
|
||||
# device = $(split[1]) ;
|
||||
# dir = $(split[2]) ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Has file ?
|
||||
# #
|
||||
# # This is no exact science, just guess work:
|
||||
# #
|
||||
# # If the last part of the current path spec
|
||||
# # includes some chars, followed by a dot,
|
||||
# # optionally followed by more chars -
|
||||
# # then it is a file (keep your fingers crossed).
|
||||
# #
|
||||
# split = [ regex.split $(dir) / ] ;
|
||||
# local maybe_file = $(split[-1]) ;
|
||||
#
|
||||
# if [ MATCH ^([^.]+\\..*) : $(maybe_file) ]
|
||||
# {
|
||||
# file = $(maybe_file) ;
|
||||
# dir = [ sequence.join $(split[1--2]) : / ] ;
|
||||
# }
|
||||
#
|
||||
# #
|
||||
# # Has dir spec ?
|
||||
# #
|
||||
# if $(dir) = "."
|
||||
# {
|
||||
# dir = "[]" ;
|
||||
# }
|
||||
# else if $(dir)
|
||||
# {
|
||||
# dir = [ regex.replace $(dir) \\.\\. - ] ;
|
||||
# dir = [ regex.replace $(dir) / . ] ;
|
||||
#
|
||||
# if $(device) = ""
|
||||
# {
|
||||
# #
|
||||
# # Relative directory
|
||||
# #
|
||||
# dir = "."$(dir) ;
|
||||
# }
|
||||
# dir = "["$(dir)"]" ;
|
||||
# }
|
||||
#
|
||||
# native = [ sequence.join $(device) $(dir) $(file) ] ;
|
||||
#
|
||||
# return $(native) ;
|
||||
# }
|
||||
#
|
||||
#
|
||||
# rule __test__ ( ) {
|
||||
#
|
||||
# import assert ;
|
||||
# import errors : try catch ;
|
||||
#
|
||||
# assert.true is-rooted "/" ;
|
||||
# assert.true is-rooted "/foo" ;
|
||||
# assert.true is-rooted "/foo/bar" ;
|
||||
# assert.result : is-rooted "." ;
|
||||
# assert.result : is-rooted "foo" ;
|
||||
# assert.result : is-rooted "foo/bar" ;
|
||||
#
|
||||
# assert.true has-parent "foo" ;
|
||||
# assert.true has-parent "foo/bar" ;
|
||||
# assert.true has-parent "." ;
|
||||
# assert.result : has-parent "/" ;
|
||||
#
|
||||
# assert.result "." : basename "." ;
|
||||
# assert.result ".." : basename ".." ;
|
||||
# assert.result "foo" : basename "foo" ;
|
||||
# assert.result "foo" : basename "bar/foo" ;
|
||||
# assert.result "foo" : basename "gaz/bar/foo" ;
|
||||
# assert.result "foo" : basename "/gaz/bar/foo" ;
|
||||
#
|
||||
# assert.result "." : parent "foo" ;
|
||||
# assert.result "/" : parent "/foo" ;
|
||||
# assert.result "foo/bar" : parent "foo/bar/giz" ;
|
||||
# assert.result ".." : parent "." ;
|
||||
# assert.result ".." : parent "../foo" ;
|
||||
# assert.result "../../foo" : parent "../../foo/bar" ;
|
||||
#
|
||||
#
|
||||
# assert.result "." : reverse "." ;
|
||||
# assert.result ".." : reverse "foo" ;
|
||||
# assert.result "../../.." : reverse "foo/bar/giz" ;
|
||||
#
|
||||
# assert.result "foo" : join "foo" ;
|
||||
# assert.result "/foo" : join "/" "foo" ;
|
||||
# assert.result "foo/bar" : join "foo" "bar" ;
|
||||
# assert.result "foo/bar" : join "foo/giz" "../bar" ;
|
||||
# assert.result "foo/giz" : join "foo/bar/baz" "../../giz" ;
|
||||
# assert.result ".." : join "." ".." ;
|
||||
# assert.result ".." : join "foo" "../.." ;
|
||||
# assert.result "../.." : join "../foo" "../.." ;
|
||||
# assert.result "/foo" : join "/bar" "../foo" ;
|
||||
# assert.result "foo/giz" : join "foo/giz" "." ;
|
||||
# assert.result "." : join lib2 ".." ;
|
||||
# assert.result "/" : join "/a" ".." ;
|
||||
#
|
||||
# assert.result /a/b : join /a/b/c .. ;
|
||||
#
|
||||
# assert.result "foo/bar/giz" : join "foo" "bar" "giz" ;
|
||||
# assert.result "giz" : join "foo" ".." "giz" ;
|
||||
# assert.result "foo/giz" : join "foo" "." "giz" ;
|
||||
#
|
||||
# try ;
|
||||
# {
|
||||
# join "a" "/b" ;
|
||||
# }
|
||||
# catch only first element may be rooted ;
|
||||
#
|
||||
# local CWD = "/home/ghost/build" ;
|
||||
# assert.result : all-parents . : . : $(CWD) ;
|
||||
# assert.result . .. ../.. ../../.. : all-parents "Jamfile" : "" : $(CWD) ;
|
||||
# assert.result foo . .. ../.. ../../.. : all-parents "foo/Jamfile" : "" : $(CWD) ;
|
||||
# assert.result ../Work .. ../.. ../../.. : all-parents "../Work/Jamfile" : "" : $(CWD) ;
|
||||
#
|
||||
# local CWD = "/home/ghost" ;
|
||||
# assert.result . .. : all-parents "Jamfile" : "/home" : $(CWD) ;
|
||||
# assert.result . : all-parents "Jamfile" : "/home/ghost" : $(CWD) ;
|
||||
#
|
||||
# assert.result "c/d" : relative "a/b/c/d" "a/b" ;
|
||||
# assert.result "foo" : relative "foo" "." ;
|
||||
#
|
||||
# local save-os = [ modules.peek path : os ] ;
|
||||
# modules.poke path : os : NT ;
|
||||
#
|
||||
# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
|
||||
# assert.result "foo/bar/giz" : make "foo\\bar\\giz" ;
|
||||
# assert.result "foo" : make "foo/." ;
|
||||
# assert.result "foo" : make "foo/bar/.." ;
|
||||
# assert.result "/D:/My Documents" : make "D:\\My Documents" ;
|
||||
# assert.result "/c:/boost/tools/build/new/project.jam" : make "c:\\boost\\tools\\build\\test\\..\\new\\project.jam" ;
|
||||
#
|
||||
# assert.result "foo\\bar\\giz" : native "foo/bar/giz" ;
|
||||
# assert.result "foo" : native "foo" ;
|
||||
# assert.result "D:\\My Documents\\Work" : native "/D:/My Documents/Work" ;
|
||||
#
|
||||
# modules.poke path : os : UNIX ;
|
||||
#
|
||||
# assert.result "foo/bar/giz" : make "foo/bar/giz" ;
|
||||
# assert.result "/sub1" : make "/sub1/." ;
|
||||
# assert.result "/sub1" : make "/sub1/sub2/.." ;
|
||||
# assert.result "sub1" : make "sub1/." ;
|
||||
# assert.result "sub1" : make "sub1/sub2/.." ;
|
||||
# assert.result "/foo/bar" : native "/foo/bar" ;
|
||||
#
|
||||
# modules.poke path : os : VMS ;
|
||||
#
|
||||
# #
|
||||
# # Don't really need to poke os before these
|
||||
# #
|
||||
# assert.result "disk:" "[dir]" "file" : split-path-VMS "disk:[dir]file" ;
|
||||
# assert.result "disk:" "[dir]" "" : split-path-VMS "disk:[dir]" ;
|
||||
# assert.result "disk:" "" "" : split-path-VMS "disk:" ;
|
||||
# assert.result "disk:" "" "file" : split-path-VMS "disk:file" ;
|
||||
# assert.result "" "[dir]" "file" : split-path-VMS "[dir]file" ;
|
||||
# assert.result "" "[dir]" "" : split-path-VMS "[dir]" ;
|
||||
# assert.result "" "" "file" : split-path-VMS "file" ;
|
||||
# assert.result "" "" "" : split-path-VMS "" ;
|
||||
#
|
||||
# #
|
||||
# # Special case: current directory
|
||||
# #
|
||||
# assert.result "" "[]" "" : split-path-VMS "[]" ;
|
||||
# assert.result "disk:" "[]" "" : split-path-VMS "disk:[]" ;
|
||||
# assert.result "" "[]" "file" : split-path-VMS "[]file" ;
|
||||
# assert.result "disk:" "[]" "file" : split-path-VMS "disk:[]file" ;
|
||||
#
|
||||
# #
|
||||
# # Make portable paths
|
||||
# #
|
||||
# assert.result "/disk:" : make "disk:" ;
|
||||
# assert.result "foo/bar/giz" : make "[.foo.bar.giz]" ;
|
||||
# assert.result "foo" : make "[.foo]" ;
|
||||
# assert.result "foo" : make "[.foo.bar.-]" ;
|
||||
# assert.result ".." : make "[.-]" ;
|
||||
# assert.result ".." : make "[-]" ;
|
||||
# assert.result "." : make "[]" ;
|
||||
# assert.result "giz.h" : make "giz.h" ;
|
||||
# assert.result "foo/bar/giz.h" : make "[.foo.bar]giz.h" ;
|
||||
# assert.result "/disk:/my_docs" : make "disk:[my_docs]" ;
|
||||
# assert.result "/disk:/boost/tools/build/new/project.jam" : make "disk:[boost.tools.build.test.-.new]project.jam" ;
|
||||
#
|
||||
# #
|
||||
# # Special case (adds '.' to end of file w/o extension to
|
||||
# # disambiguate from directory in portable path spec).
|
||||
# #
|
||||
# assert.result "Jamfile." : make "Jamfile" ;
|
||||
# assert.result "dir/Jamfile." : make "[.dir]Jamfile" ;
|
||||
# assert.result "/disk:/dir/Jamfile." : make "disk:[dir]Jamfile" ;
|
||||
#
|
||||
# #
|
||||
# # Make native paths
|
||||
# #
|
||||
# assert.result "disk:" : native "/disk:" ;
|
||||
# assert.result "[.foo.bar.giz]" : native "foo/bar/giz" ;
|
||||
# assert.result "[.foo]" : native "foo" ;
|
||||
# assert.result "[.-]" : native ".." ;
|
||||
# assert.result "[.foo.-]" : native "foo/.." ;
|
||||
# assert.result "[]" : native "." ;
|
||||
# assert.result "disk:[my_docs.work]" : native "/disk:/my_docs/work" ;
|
||||
# assert.result "giz.h" : native "giz.h" ;
|
||||
# assert.result "disk:Jamfile." : native "/disk:Jamfile." ;
|
||||
# assert.result "disk:[my_docs.work]Jamfile." : native "/disk:/my_docs/work/Jamfile." ;
|
||||
#
|
||||
# modules.poke path : os : $(save-os) ;
|
||||
#
|
||||
# }
|
||||
|
||||
#
|
||||
|
||||
|
||||
#def glob(dir, patterns):
|
||||
# result = []
|
||||
# for pattern in patterns:
|
||||
# result.extend(builtin_glob(os.path.join(dir, pattern)))
|
||||
# return result
|
||||
|
||||
def glob(dirs, patterns, exclude_patterns=None):
    """Returns the list of files matching any of the given patterns in
    the specified directories.

    Both directories and patterns are supplied as portable paths.  Each
    pattern should be a non-absolute path and can't contain '.' or '..'
    elements.  Each slash-separated element of a pattern can contain the
    following special characters:
      - '?', which matches any single character;
      - '*', which matches an arbitrary number of characters.
    A file $(d)/e1/e2/e3 (where 'd' is in 'dirs') matches pattern
    p1/p2/p3 if and only if e1 matches p1, e2 matches p2 and so on.

    Files matching 'exclude_patterns' (same syntax; optional) are
    removed from the result.

    For example:
        [ glob . : *.cpp ]
        [ glob . : */build/Jamfile ]
    """
    assert isinstance(patterns, list)
    assert isinstance(dirs, list)

    if not exclude_patterns:
        exclude_patterns = []
    else:
        assert isinstance(exclude_patterns, list)

    real_patterns = [os.path.join(d, p) for p in patterns for d in dirs]
    real_exclude_patterns = [os.path.join(d, p) for p in exclude_patterns
                             for d in dirs]

    inc = [os.path.normpath(name) for p in real_patterns
           for name in builtin_glob(p)]
    # Use a set for the exclusions: membership tests below become O(1)
    # instead of a linear scan of the exclusion list per included file.
    exc = set(os.path.normpath(name) for p in real_exclude_patterns
              for name in builtin_glob(p))
    return [x for x in inc if x not in exc]
|
||||
|
||||
def glob_tree(roots, patterns, exclude_patterns=None):
    """Recursive version of GLOB.  Builds the glob of files while also
    searching in the subdirectories of the given roots.  An optional set
    of exclusion patterns will filter out the matching entries from the
    result.  The exclusions also apply to the subdirectory scanning,
    such that directories that match the exclusion patterns will not be
    searched."""
    exclude_patterns = exclude_patterns or []

    matches = glob(roots, patterns, exclude_patterns)
    # Recurse into matched entries that are real subdirectories.
    subdirs = [m for m in matches
               if m not in ('.', '..') and os.path.isdir(m)]
    if subdirs:
        matches.extend(glob_tree(subdirs, patterns, exclude_patterns))

    return matches
|
||||
|
||||
def glob_in_parents(dir, patterns, upper_limit=None):
    """Globs successive parent directories of 'dir' until the first
    match is found.  Returns an empty list if no match is found.

    If 'upper_limit' is given, the ascent stops once that directory has
    been searched.  (The original code accepted this parameter but
    silently ignored it.)
    """
    assert isinstance(dir, str)
    assert isinstance(patterns, list)

    result = []

    absolute_dir = os.path.normpath(os.path.join(os.getcwd(), dir))
    # Resolve the limit once, against the same base as 'dir'.
    if upper_limit is not None:
        upper_limit = os.path.normpath(os.path.join(os.getcwd(), upper_limit))

    while absolute_dir:
        new_dir = os.path.split(absolute_dir)[0]
        if new_dir == absolute_dir:
            # Reached the filesystem root.
            break
        result = glob([new_dir], patterns)
        if result:
            break
        if new_dir == upper_limit:
            # Don't search above the requested limit.
            break
        absolute_dir = new_dir

    return result
|
||||
|
||||
|
||||
# The relpath functionality is written by
|
||||
# Cimarron Taylor
|
||||
def split(p, rest=None):
    """Splits path 'p' into the list of its components, prepended to
    'rest' (an accumulator used by the recursion).

    Written by Cimarron Taylor.
    """
    # The original used a mutable default argument (rest=[]).  It was
    # never mutated, so no bug resulted, but None is the safe idiom.
    if rest is None:
        rest = []
    (h, t) = os.path.split(p)
    if len(h) < 1:
        return [t] + rest
    if len(t) < 1:
        return [h] + rest
    return split(h, [t] + rest)
|
||||
|
||||
def commonpath(l1, l2, common=None):
    """Returns a tuple (common, tail1, tail2): the longest shared prefix
    of the component lists 'l1' and 'l2', followed by their remainders.

    Written by Cimarron Taylor.
    """
    # The original used a mutable default argument (common=[]).  It was
    # never mutated, so no bug resulted, but None is the safe idiom.
    if common is None:
        common = []
    if len(l1) < 1:
        return (common, l1, l2)
    if len(l2) < 1:
        return (common, l1, l2)
    if l1[0] != l2[0]:
        return (common, l1, l2)
    return commonpath(l1[1:], l2[1:], common + [l1[0]])
|
||||
|
||||
def relpath(p1, p2):
    """Returns the relative path from 'p1' to 'p2'.

    Written by Cimarron Taylor.
    """
    (common, l1, l2) = commonpath(split(p1), split(p2))
    parts = []
    if len(l1) > 0:
        # One '../' step for every component of p1 below the common prefix.
        parts = ['../' * len(l1)]
    parts = parts + l2
    if not parts:
        return "."
    return os.path.join(*parts)
|
||||
25
src/util/regex.py
Normal file
25
src/util/regex.py
Normal file
@@ -0,0 +1,25 @@
|
||||
# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
import re
|
||||
|
||||
def transform(list, pattern, indices=[1]):
    """Matches all elements of 'list' against the 'pattern' and returns,
    for every successful match, the groups selected by 'indices'.  If
    'indices' is omitted, returns a list of the first parenthesised
    group of all successful matches.
    """
    result = []
    for element in list:
        match = re.match(pattern, element)
        if not match:
            continue
        for i in indices:
            result.append(match.group(i))
    return result
|
||||
|
||||
52
src/util/sequence.py
Normal file
52
src/util/sequence.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
import operator
|
||||
|
||||
def unique(values):
    """Returns the elements of 'values' with duplicates removed,
    preserving the order of first occurrence.

    The previous implementation returned list(set(values)), whose
    ordering is arbitrary from run to run; this version keeps the
    deterministic order the commented-out original provided, while
    still using a set for O(1) duplicate checks.
    """
    seen = set()
    result = []
    for v in values:
        if v not in seen:
            seen.add(v)
            result.append(v)
    return result
|
||||
|
||||
|
||||
|
||||
def max_element(elements, ordered=None):
    """Returns the maximum element of 'elements'.  Uses 'ordered' for
    comparisons, or '<' if none is provided.
    """
    if not ordered:
        ordered = operator.lt

    # Linear scan; 'best' is replaced whenever it orders before the
    # candidate.  (Renamed from 'max', which shadowed the builtin.)
    best = elements[0]
    for candidate in elements[1:]:
        if ordered(best, candidate):
            best = candidate

    return best
|
||||
|
||||
def select_highest_ranked(elements, ranks):
    """Returns all of 'elements' for which the corresponding element in
    the parallel list 'ranks' is equal to the maximum value in 'ranks'.
    """
    if not elements:
        return []

    top = max_element(ranks)
    return [element for element, rank in zip(elements, ranks)
            if rank == top]
|
||||
42
src/util/set.py
Normal file
42
src/util/set.py
Normal file
@@ -0,0 +1,42 @@
|
||||
# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
from utility import to_seq
|
||||
|
||||
def difference(b, a):
    """Returns the elements of 'b' that are not in 'a'."""
    return [element for element in b if element not in a]
|
||||
|
||||
def intersection(set1, set2):
    """Removes from set1 any items which don't appear in set2 and
    returns the result."""
    return [v for v in set1 if v in set2]
|
||||
|
||||
def contains(small, large):
    """Returns true iff all elements of 'small' exist in 'large'.

    Either argument may be a single string, which is treated as a
    one-element sequence (via to_seq).
    """
    large = to_seq(large)
    for element in to_seq(small):
        if element not in large:
            return False
    return True
|
||||
|
||||
def equal(a, b):
    """Returns True iff 'a' contains the same elements as 'b',
    irrespective of their order.
    # TODO: Python 2.4 has a proper set class.
    """
    if not contains(a, b):
        return False
    return contains(b, a)
|
||||
155
src/util/utility.py
Normal file
155
src/util/utility.py
Normal file
@@ -0,0 +1,155 @@
|
||||
# (C) Copyright David Abrahams 2001. Permission to copy, use, modify, sell and
|
||||
# distribute this software is granted provided this copyright notice appears in
|
||||
# all copies. This software is provided "as is" without express or implied
|
||||
# warranty, and with no claim as to its suitability for any purpose.
|
||||
|
||||
""" Utility functions to add/remove/get grists.
|
||||
Grists are string enclosed in angle brackets (<>) that are used as prefixes. See Jam for more information.
|
||||
"""
|
||||
|
||||
import re
|
||||
import os
|
||||
import bjam
|
||||
from b2.exceptions import *
|
||||
|
||||
# Matches an optional leading "<...>" grist: group 1 is the grist (including
# the angle brackets), group 2 is the remainder (the value).
__re_grist_and_value = re.compile (r'(<[^>]*>)(.*)')
# Matches a string that consists entirely of a grist; group 1 is the content
# between the angle brackets.
__re_grist_content = re.compile ('^<(.*)>$')
# Matches a single backslash character.
__re_backslash = re.compile (r'\\')
|
||||
def to_seq (value):
    """ Normalizes 'value' to a sequence.

        A false value (None, '', 0, empty sequence) yields []; a string
        yields a one-element list; anything else is returned unchanged.
    """
    if not value:
        return []
    if isinstance (value, str):
        return [value]
    return value
||||
def replace_references_by_objects (manager, refs):
    """ Maps each reference in 'refs' to the corresponding object via the
        manager and returns the resulting list.
    """
    return [ manager.get_object (ref) for ref in refs ]
||||
def add_grist (features):
    """ Transform a string by bracketing it with "<>". If already bracketed, does nothing.
        features: one string or a sequence of strings
        return: the gristed string, if features is a string, or a sequence of gristed strings, if features is a sequence
    """

    def grist_one (feature):
        # A leading '<' or a trailing '>' means the string is treated as
        # already gristed and is returned untouched.
        if feature [0] == '<' or feature [-1] == '>':
            return feature
        return '<' + feature + '>'

    if isinstance (features, str):
        return grist_one (features)
    return [ grist_one (f) for f in features ]
||||
def replace_grist (features, new_grist):
    """ Replaces the grist of a string (or of each string in a sequence)
        with 'new_grist' and returns the result. A string without a grist
        simply gets 'new_grist' prepended.
    """
    def replace_grist_one (name, new_grist):
        m = __re_grist_and_value.match (name)
        if m:
            # Keep only the value part (group 2) and attach the new grist.
            return new_grist + m.group (2)
        return new_grist + name

    if isinstance (features, str):
        return replace_grist_one (features, new_grist)
    return [ replace_grist_one (f, new_grist) for f in features ]
||||
def get_value (property):
    """ Returns the value part of a property (or of each property in a
        sequence), i.e. everything after the grist.
    """
    # Substituting an empty grist leaves just the value.
    return replace_grist (property, '')
||||
def get_grist (value):
    """ Returns the grist of a string, angle brackets included, or '' when
        the string has no grist. For a sequence, applies this to every
        element and returns the resulting sequence.
    """
    def get_grist_one (name):
        m = __re_grist_and_value.match (name)
        if m:
            return m.group (1)
        return ''

    if isinstance (value, str):
        return get_grist_one (value)
    return [ get_grist_one (v) for v in value ]
||||
def ungrist (value):
    """ Strips the enclosing angle brackets from a fully-gristed string and
        returns the content. For a sequence, applies this to every element.
        Raises if a value is not of the form '<...>'.
    """
    def ungrist_one (v):
        m = __re_grist_content.match (v)
        if not m:
            # NOTE(review): BaseException presumably comes from the
            # 'from b2.exceptions import *' at the top of the module rather
            # than being the builtin -- verify.
            raise BaseException ("in ungrist: '%s' is not of the form <.*>" % v)
        return m.group (1)

    if isinstance (value, str):
        return ungrist_one (value)
    return [ ungrist_one (v) for v in value ]
||||
def replace_suffix (name, new_suffix):
    """ Replaces the suffix (extension) of 'name' with 'new_suffix'.
        When 'name' has no suffix, 'new_suffix' is simply appended.
    """
    root = os.path.splitext (name) [0]
    return root + new_suffix
||||
def forward_slashes (s):
    """ Converts all backslashes in 's' to forward slashes and returns the
        result.
    """
    # str.replace is simpler and faster than the regex substitution used
    # previously; a single literal character needs no pattern matching.
    return s.replace ('\\', '/')
||||
def split_action_id (id):
    """ Splits an id in the toolset and specific rule parts. E.g.
        'gcc.compile.c++' returns ('gcc', 'compile.c++')
    """
    parts = id.split ('.', 1)
    if len (parts) == 1:
        # No dot: the whole id is the toolset, the rule name is empty.
        return (parts [0], '')
    return (parts [0], parts [1])
||||
def os_name ():
    """ Returns the sole value of bjam's OS variable. """
    values = bjam.variable("OS")
    # bjam variables are lists; OS is expected to hold exactly one value.
    assert(len(values) == 1)
    return values[0]
||||
def platform ():
    """ Returns the value of bjam's OSPLAT variable. """
    return bjam.variable("OSPLAT")
||||
def os_version ():
    """ Returns the value of bjam's OSVER variable. """
    return bjam.variable("OSVER")
||||
def on_windows ():
    """ Returns true if running on windows, whether in cygwin or not.
    """
    if bjam.variable("NT"):
        return True

    if bjam.variable("UNIX"):
        # On a UNIX-flavoured bjam, Cygwin is detected via uname.
        uname = bjam.variable("JAMUNAME")
        if uname and uname[0].startswith("CYGWIN"):
            return True

    return False
||||
Reference in New Issue
Block a user