mirror of https://github.com/boostorg/build.git
This commit was manufactured by cvs2svn to create branch 'RC_1_34_0'.
[SVN r36550]
617
v2/tools/doxproc.py
Normal file
@@ -0,0 +1,617 @@
#!/usr/bin/python
# Copyright 2006 Rene Rivera
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)

'''
Processing of Doxygen generated XML.
'''

import os
import os.path
import sys
import time
import string
import getopt
import glob
import re
import xml.dom.minidom


def usage():
    print '''
Usage:
    %s options

Options:
    --xmldir        Directory with the Doxygen xml result files.
    --output        Write the output BoostBook to the given location.
    --id            The ID of the top level BoostBook section.
    --title         The title of the top level BoostBook section.
    --enable-index  Generate additional index sections for classes and
                    types.
''' % ( sys.argv[0] )


def get_args( argv = sys.argv[1:] ):
    spec = [
        'xmldir=',
        'output=',
        'id=',
        'title=',
        'enable-index',
        'help' ]
    options = {
        '--xmldir' : 'xml',
        '--output' : None,
        '--id' : 'dox',
        '--title' : 'Doxygen'
        }
    ( option_pairs, other ) = getopt.getopt( argv, '', spec )
    map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs )

    if options.has_key( '--help' ):
        usage()
        sys.exit(1)

    return {
        'xmldir' : options['--xmldir'],
        'output' : options['--output'],
        'id' : options['--id'],
        'title' : options['--title'],
        'index' : options.has_key('--enable-index')
        }

def if_attribute(node, attribute, true_value, false_value=None):
    if node.getAttribute(attribute) == 'yes':
        return true_value
    else:
        return false_value

class Doxygen2BoostBook:

    def __init__( self,
        #~ id=None,
        #~ title='',
        #~ last_revision=None,
        **kwargs ):
        ##
        self.args = kwargs
        self.args.setdefault('id','')
        self.args.setdefault('title','')
        self.args.setdefault('last_revision', time.asctime())
        self.args.setdefault('index', False)
        self.id = '%(id)s.reference' % self.args
        self.args['id'] = self.id
        self.boostbook = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?>
            <section id="%(id)s" name="%(title)s" last-revision="%(last_revision)s">
                <title>%(title)s</title>
                <library-reference id="%(id)s.headers">
                    <title>Headers</title>
                </library-reference>
                <index id="%(id)s.classes">
                    <title>Classes</title>
                </index>
                <index id="%(id)s.index">
                    <title>Index</title>
                </index>
            </section>
            ''' % self.args )
        self.section = {
            'headers' : self._getChild('library-reference',id='%(id)s.headers' % self.args),
            'classes' : self._getChild('index',id='%(id)s.classes' % self.args),
            'index' : self._getChild('index',id='%(id)s.index' % self.args)
            }
        if not self.args['index']:
            self.section['classes'].parentNode.removeChild(self.section['classes'])
            self.section['classes'].unlink()
            del self.section['classes']
            self.section['index'].parentNode.removeChild(self.section['index'])
            self.section['index'].unlink()
            del self.section['index']
        self.symbols = {}
        self.generated = False
        self.idmap = {}

    def addDox( self, document ):
        ##
        self._translateNode(document.documentElement)

    def tostring( self ):
        self._generate()
        #~ return self.boostbook.toprettyxml('  ')
        return self.boostbook.toxml('utf-8')

    def _generate( self ):
        if not self.generated:
            self.generated = True
            symbols = self.symbols.keys()
            symbols.sort()
            for symbol in symbols:
                if self.symbols[symbol]['kind'] in ('header',):
                    self.section['headers'].appendChild(self.symbols[symbol]['dom'])
            for symbol in symbols:
                if self.symbols[symbol]['kind'] not in ('namespace', 'header'):
                    container = self._resolveContainer(self.symbols[symbol],
                        self.symbols[self.symbols[symbol]['header']]['dom'])
                    if container.nodeName != 'namespace':
                        ## The current BoostBook to Docbook translation doesn't
                        ## respect, nor assign, IDs to inner types of any kind.
                        ## So nuke the ID entry so as not to create bogus links.
                        del self.idmap[self.symbols[symbol]['id']]
                    container.appendChild(self.symbols[symbol]['dom'])
            self._rewriteIDs(self.boostbook.documentElement)

    def _rewriteIDs( self, node ):
        if node.nodeName in ('link',):
            if (self.idmap.has_key(node.getAttribute('linkend'))):
                node.setAttribute('linkend',self.idmap[node.getAttribute('linkend')])
            else:
                node.removeAttribute('linkend')
        elif hasattr(node,'hasAttribute') and node.hasAttribute('id') and self.idmap.has_key(node.getAttribute('id')):
            node.setAttribute('id',self.idmap[node.getAttribute('id')])
        if node.firstChild:
            self._rewriteIDs(node.firstChild)
        if node.nextSibling:
            self._rewriteIDs(node.nextSibling)

    def _resolveContainer( self, cpp, root ):
        container = root
        for ns in cpp['namespace']:
            node = self._getChild('namespace',name=ns,root=container)
            if not node:
                node = container.appendChild(
                    self._createNode('namespace',name=ns))
            container = node
        for inner in cpp['name'].split('::'):
            node = self._getChild(name=inner,root=container)
            if not node:
                break
            container = node
        return container

    def _setID( self, id, name ):
        self.idmap[id] = name.replace('::','.').replace('/','.')
        #~ print '--| setID:',id,'::',self.idmap[id]

    def _translateNode( self, *context, **kwargs ):
        node = None
        name = '_translate'
        for c in context:
            if c:
                if not isinstance(c,xml.dom.Node):
                    name += '_'+c
                else:
                    name += '_'+c.nodeName
                    node = c
        name = name.replace('-','_')
        #~ print '_translateNode:', name
        if node and hasattr(self,name):
            return getattr(self,name)(node,**kwargs)
        else:
            return None

    def _translateChildren( self, parent, **kwargs ):
        target = kwargs['target']
        for n in parent.childNodes:
            child = self._translateNode(n,target=target)
            if child:
                target.appendChild(child)
            else:
                child = n.cloneNode(False)
                if hasattr(child,'data'):
                    child.data = child.data.strip()
                target.appendChild(child)
                self._translateChildren(n,target=child)

    def _translateDescription( self, node, target=None, tag='description', **kwargs ):
        description = self._getChild(tag,root=target)
        if not description:
            description = target.appendChild(self._createNode(tag))
        self._translateChildren(node,target=description)
        return description

    def _translate_doxygen( self, node ):
        #~ print '_translate_doxygen:', node.nodeName
        result = []
        for n in node.childNodes:
            newNode = self._translateNode(n)
            if newNode:
                result.append(newNode)
        return result

    def _translate_doxygenindex( self, node ):
        #~ print '_translate_doxygenindex:', node.nodeName
        if self.args['index']:
            entries = []
            classes = []
            for n in node.childNodes:
                if n.nodeName == 'compound':
                    if n.getAttribute('kind') not in ('file','dir','define'):
                        cpp = self._cppName(self._getChildData('name',root=n))
                        entry = {
                            'name' : cpp['name'],
                            'compoundname' : cpp['compoundname'],
                            'id' : n.getAttribute('refid')
                            }
                        if n.getAttribute('kind') in ('class','struct'):
                            classes.append(entry)
                        entries.append(entry)
                        for m in n.childNodes:
                            if m.nodeName == 'member':
                                cpp = self._cppName(self._getChildData('name',root=m))
                                entry = {
                                    'name' : cpp['name'],
                                    'compoundname' : cpp['compoundname'],
                                    'id' : n.getAttribute('refid')
                                    }
                                if hasattr(m,'getAttribute') and m.getAttribute('kind') in ('class','struct'):
                                    classes.append(entry)
                                entries.append(entry)
            entries.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower()))
            classes.sort(lambda x,y: cmp(x['name'].lower(),y['name'].lower()))
            self._translate_index_(entries,target=self.section['index'])
            self._translate_index_(classes,target=self.section['classes'])
        return None

    def _translate_index_(self, entries, target=None, **kwargs ):
        i = 0
        targetID = target.getAttribute('id')
        while i < len(entries):
            dividerKey = entries[i]['name'][0].upper()
            divider = target.appendChild(self._createNode('indexdiv',id=targetID+'.'+dividerKey))
            divider.appendChild(self._createText('title',dividerKey))
            while i < len(entries) and dividerKey == entries[i]['name'][0].upper():
                iename = entries[i]['name']
                ie = divider.appendChild(self._createNode('indexentry'))
                ie = ie.appendChild(self._createText('primaryie',iename))
                while i < len(entries) and entries[i]['name'] == iename:
                    ie.appendChild(self.boostbook.createTextNode(' ('))
                    ie.appendChild(self._createText(
                        'link',entries[i]['compoundname'],linkend=entries[i]['id']))
                    ie.appendChild(self.boostbook.createTextNode(')'))
                    i += 1

    def _translate_compounddef( self, node, target=None, **kwargs ):
        return self._translateNode(node,node.getAttribute('kind'))

    def _translate_compounddef_namespace( self, node, target=None, **kwargs ):
        #~ print '--| _translate_compounddef_namespace:', node.getAttribute('id')
        namespace = {
            'id' : node.getAttribute('id'),
            'kind' : 'namespace',
            'name' : self._getChildData('compoundname',root=node),
            'brief' : self._getChildData('briefdescription',root=node),
            'detailed' : self._getChildData('detaileddescription',root=node),
            'parsed' : False
            }
        if self.symbols.has_key(namespace['name']):
            if not self.symbols[namespace['name']]['parsed']:
                self.symbols[namespace['name']]['parsed'] = True
                #~ for n in node.childNodes:
                    #~ if hasattr(n,'getAttribute'):
                        #~ self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs)
        else:
            self.symbols[namespace['name']] = namespace
            #~ self._setID(namespace['id'],namespace['name'])
        return None

    def _translate_compounddef_class( self, node, target=None, **kwargs ):
        return self._translate_compounddef_struct(node,tag='class',target=target,**kwargs)

    def _translate_compounddef_struct( self, node, tag='struct', target=None, **kwargs ):
        #~ print '--| _translate_compounddef_struct:', node.getAttribute('id')
        result = None
        includes = self._getChild('includes',root=node)
        if includes:
            ## Add the header into the output table.
            self._translate_compounddef_includes_(includes,includes,**kwargs)
            ## Compounds are the declared symbols, classes, types, etc.
            ## We add them to the symbol table, along with the partial DOM for them
            ## so that they can be organized into the output later.
            compoundname = self._getChildData('compoundname',root=node)
            compoundname = self._cppName(compoundname)
            self._setID(node.getAttribute('id'),compoundname['compoundname'])
            struct = self._createNode(tag,name=compoundname['name'].split('::')[-1])
            self.symbols[compoundname['compoundname']] = {
                'header' : includes.firstChild.data,
                'namespace' : compoundname['namespace'],
                'id' : node.getAttribute('id'),
                'kind' : tag,
                'name' : compoundname['name'],
                'dom' : struct
                }
            for n in node.childNodes:
                self._translateNode(n,target=struct,scope=compoundname['compoundname'])
            result = struct
        return result

    def _translate_compounddef_includes_( self, node, target=None, **kwargs ):
        name = node.firstChild.data
        if not self.symbols.has_key(name):
            self._setID(node.getAttribute('refid'),name)
            self.symbols[name] = {
                'kind' : 'header',
                'id' : node.getAttribute('refid'),
                'dom' : self._createNode('header',
                    id=node.getAttribute('refid'),
                    name=name)
                }
        return None

    def _translate_basecompoundref( self, ref, target=None, **kwargs ):
        inherit = target.appendChild(self._createNode('inherit',
            access=ref.getAttribute('prot')))
        self._translateChildren(ref,target=inherit)
        return

    def _translate_templateparamlist( self, templateparamlist, target=None, **kwargs ):
        template = target.appendChild(self._createNode('template'))
        for param in templateparamlist.childNodes:
            if param.nodeName == 'param':
                paramKind = None
                if self._getChildData('type',root=param) in (
                    'class','typename'):
                    paramKind = 'template-type-parameter'
                else:
                    paramKind = 'template-nontype-parameter'
                templateParam = template.appendChild(
                    self._createNode(paramKind,
                        name=self._getChildData('declname',root=param)))
                defval = self._getChild('defval',root=param)
                if defval:
                    templateParam.appendChild(self._createText('default',
                        self._getChildData('ref',root=defval.firstChild)))
        return template

    def _translate_briefdescription( self, brief, target=None, **kwargs ):
        self._translateDescription(brief,target=target,**kwargs)
        return self._translateDescription(brief,target=target,tag='purpose',**kwargs)

    def _translate_detaileddescription( self, detailed, target=None, **kwargs ):
        return self._translateDescription(detailed,target=target,**kwargs)

    def _translate_sectiondef( self, sectiondef, target=None, **kwargs ):
        self._translateNode(sectiondef,sectiondef.getAttribute('kind'),target=target,**kwargs)

    def _translate_sectiondef_x_( self, sectiondef, target=None, **kwargs ):
        for n in sectiondef.childNodes:
            if hasattr(n,'getAttribute'):
                self._translateNode(n,n.getAttribute('kind'),target=target,**kwargs)
        return None

    def _translate_sectiondef_func_( self, sectiondef, name='functions', target=None, **kwargs ):
        members = target.appendChild(self._createNode('method-group',name=name))
        for n in sectiondef.childNodes:
            if hasattr(n,'getAttribute'):
                self._translateNode(n,n.getAttribute('kind'),target=members,**kwargs)
        return members

    def _translate_sectiondef_public_type( self, sectiondef, target=None, **kwargs ):
        return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs)

    def _translate_sectiondef_public_attrib( self, sectiondef, target=None, **kwargs):
        return self._translate_sectiondef_x_(sectiondef,target=target,**kwargs)

    def _translate_sectiondef_public_func( self, sectiondef, target=None, **kwargs ):
        return self._translate_sectiondef_func_(sectiondef,
            name='public member functions',target=target,**kwargs)

    def _translate_sectiondef_public_static_func( self, sectiondef, target=None, **kwargs):
        return self._translate_sectiondef_func_(sectiondef,
            name='public static functions',target=target,**kwargs)

    def _translate_sectiondef_protected_func( self, sectiondef, target=None, **kwargs ):
        return self._translate_sectiondef_func_(sectiondef,
            name='protected member functions',target=target,**kwargs)

    def _translate_sectiondef_private_static_func( self, sectiondef, target=None, **kwargs):
        return self._translate_sectiondef_func_(sectiondef,
            name='private static functions',target=target,**kwargs)

    def _translate_memberdef_typedef( self, memberdef, target=None, scope=None, **kwargs ):
        self._setID(memberdef.getAttribute('id'),
            scope+'::'+self._getChildData('name',root=memberdef))
        typedef = target.appendChild(self._createNode('typedef',
            id=memberdef.getAttribute('id'),
            name=self._getChildData('name',root=memberdef)))
        typedef_type = typedef.appendChild(self._createNode('type'))
        self._translateChildren(self._getChild('type',root=memberdef),target=typedef_type)
        return typedef

    def _translate_memberdef_function( self, memberdef, target=None, scope=None, **kwargs ):
        ## The current BoostBook to Docbook translator doesn't respect method
        ## IDs. Nor does it assign any usable IDs to the individual methods.
        # self._setID(memberdef.getAttribute('id'),
        #     scope+'::'+self._getChildData('name',root=memberdef))
        ## Hence instead of registering an ID for the method we point it at the
        ## containing class.
        self._setID(memberdef.getAttribute('id'),scope)
        method = target.appendChild(self._createNode('method',
            # id=memberdef.getAttribute('id'),
            name=self._getChildData('name',root=memberdef),
            cv=' '.join([
                if_attribute(memberdef,'const','const','').strip()
                ]),
            specifiers=' '.join([
                if_attribute(memberdef,'static','static',''),
                if_attribute(memberdef,'explicit','explicit',''),
                if_attribute(memberdef,'inline','inline','')
                ]).strip()
            ))
        for n in memberdef.childNodes:
            self._translateNode(memberdef,'function',n,target=method)
        return method

    def _translate_memberdef_function_templateparamlist(
        self, templateparamlist, target=None, **kwargs ):
        return self._translate_templateparamlist(templateparamlist,target=target,**kwargs)

    def _translate_memberdef_function_type( self, resultType, target=None, **kwargs ):
        methodType = target.appendChild(self._createNode('type'))
        self._translateChildren(resultType,target=methodType)
        return methodType

    def _translate_memberdef_function_briefdescription( self, description, target=None, **kwargs ):
        self._translateDescription(description,target=target,**kwargs)
        return self._translateDescription(description,target=target,tag='purpose',**kwargs)

    def _translate_memberdef_function_detaileddescription( self, description, target=None, **kwargs ):
        return self._translateDescription(description,target=target,**kwargs)

    def _translate_memberdef_function_inbodydescription( self, description, target=None, **kwargs ):
        return self._translateDescription(description,target=target,**kwargs)

    def _translate_memberdef_function_param( self, param, target=None, **kwargs ):
        return self._translate_param(param,target=target,**kwargs)

    def _translate_memberdef_variable( self, memberdef, target=None, scope=None, **kwargs ):
        self._setID(memberdef.getAttribute('id'),
            scope+'::'+self._getChildData('name',root=memberdef))
        data_member = target.appendChild(self._createNode('data-member',
            id=memberdef.getAttribute('id'),
            name=self._getChildData('name',root=memberdef)))
        data_member_type = data_member.appendChild(self._createNode('type'))
        self._translateChildren(self._getChild('type',root=memberdef),target=data_member_type)

    def _translate_memberdef_enum( self, memberdef, target=None, scope=None, **kwargs ):
        self._setID(memberdef.getAttribute('id'),
            scope+'::'+self._getChildData('name',root=memberdef))
        enum = target.appendChild(self._createNode('enum',
            id=memberdef.getAttribute('id'),
            name=self._getChildData('name',root=memberdef)))
        for n in memberdef.childNodes:
            self._translateNode(memberdef,'enum',n,target=enum,scope=scope,**kwargs)
        return enum

    def _translate_memberdef_enum_enumvalue( self, enumvalue, target=None, scope=None, **kwargs ):
        self._setID(enumvalue.getAttribute('id'),
            scope+'::'+self._getChildData('name',root=enumvalue))
        value = target.appendChild(self._createNode('enumvalue',
            id=enumvalue.getAttribute('id'),
            name=self._getChildData('name',root=enumvalue)))
        initializer = self._getChild('initializer',root=enumvalue)
        if initializer:
            self._translateChildren(initializer,
                target=target.appendChild(self._createNode('default')))
        return value

    def _translate_param( self, param, target=None, **kwargs):
        parameter = target.appendChild(self._createNode('parameter',
            name=self._getChildData('declname',root=param)))
        paramtype = parameter.appendChild(self._createNode('paramtype'))
        self._translateChildren(self._getChild('type',root=param),target=paramtype)
        defval = self._getChild('defval',root=param)
        if defval:
            self._translateChildren(self._getChild('defval',root=param),target=parameter)
        return parameter

    def _translate_ref( self, ref, **kwargs ):
        return self._translateNode(ref,ref.getAttribute('kindref'))

    def _translate_ref_compound( self, ref, **kwargs ):
        result = self._createNode('link',linkend=ref.getAttribute('refid'))
        classname = result.appendChild(self._createNode('classname'))
        self._translateChildren(ref,target=classname)
        return result

    def _translate_ref_member( self, ref, **kwargs ):
        result = self._createNode('link',linkend=ref.getAttribute('refid'))
        self._translateChildren(ref,target=result)
        return result

    def _getChild( self, tag = None, id = None, name = None, root = None ):
        if not root:
            root = self.boostbook.documentElement
        for n in root.childNodes:
            found = True
            if tag and found:
                found = found and tag == n.nodeName
            if id and found:
                if n.hasAttribute('id'):
                    found = found and n.getAttribute('id') == id
                else:
                    found = found and n.hasAttribute('id') and n.getAttribute('id') == id
            if name and found:
                found = found and n.hasAttribute('name') and n.getAttribute('name') == name
            if found:
                #~ print '--|', n
                return n
        return None

    def _getChildData( self, tag, **kwargs ):
        child = self._getChild(tag,**kwargs)
        if child:
            text = self._getChild('#text',root=child)
            if text:
                return text.data.strip()
        return ''

    def _cppName( self, type ):
        parts = re.search('^([^<]+)[<]?(.*)[>]?$',type.strip().strip(':'))
        result = {
            'compoundname' : parts.group(1),
            'namespace' : parts.group(1).split('::')[0:-1],
            'name' : parts.group(1).split('::')[-1],
            'specialization' : parts.group(2)
            }
        if result['namespace'] and len(result['namespace']) > 0:
            namespace = '::'.join(result['namespace'])
            while (
                len(result['namespace']) > 0 and (
                    not self.symbols.has_key(namespace) or
                    self.symbols[namespace]['kind'] != 'namespace')
                ):
                result['name'] = result['namespace'].pop()+'::'+result['name']
                namespace = '::'.join(result['namespace'])
        return result

    def _createNode( self, tag, **kwargs ):
        result = self.boostbook.createElement(tag)
        for k in kwargs.keys():
            if k == 'id':
                result.setAttribute('id',kwargs[k])
            else:
                result.setAttribute(k,kwargs[k])
        return result

    def _createText( self, tag, data, **kwargs ):
        result = self._createNode(tag,**kwargs)
        data = data.strip()
        if len(data) > 0:
            result.appendChild(self.boostbook.createTextNode(data))
        return result


def main( xmldir=None, output=None, id=None, title=None, index=False ):
    #~ print '--- main: xmldir = %s, output = %s' % (xmldir,output)

    input = glob.glob( os.path.abspath( os.path.join( xmldir, "*.xml" ) ) )
    input.sort()
    translator = Doxygen2BoostBook(id=id, title=title, index=index)
    #~ Feed in the namespaces first to build up the set of namespaces
    #~ and definitions so that lookup is unambiguous when reading in the definitions.
    namespace_files = filter(
        lambda x:
            os.path.basename(x).startswith('namespace_'),
        input)
    decl_files = filter(
        lambda x:
            not os.path.basename(x).startswith('namespace_') and not os.path.basename(x).startswith('_'),
        input)
    for dox in namespace_files:
        #~ print '--|',os.path.basename(dox)
        translator.addDox(xml.dom.minidom.parse(dox))
    for dox in decl_files:
        #~ print '--|',os.path.basename(dox)
        translator.addDox(xml.dom.minidom.parse(dox))

    if output:
        output = open(output,'w')
    else:
        output = sys.stdout
    if output:
        output.write(translator.tostring())


main( **get_args() )
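
# A minimal usage sketch (not part of the script itself); the directory and the
# id/title values below are hypothetical examples. Doxygen is run first with XML
# output enabled (GENERATE_XML = YES), then this script merges the resulting
# *.xml files into a single BoostBook section:
#
#   python doxproc.py --xmldir=xml --output=reference.xml \
#       --id=mylib --title="My Library" --enable-index
#
# Without --output the BoostBook XML is written to stdout; --enable-index also
# emits the extra Classes and Index sections built by _translate_doxygenindex.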
456
v2/tools/mpi.jam
Normal file
@@ -0,0 +1,456 @@
# Support for the Message Passing Interface (MPI)
#
# (C) Copyright 2005, 2006 Trustees of Indiana University
# (C) Copyright 2005 Douglas Gregor
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
#
# Authors: Douglas Gregor
#          Andrew Lumsdaine
#
# ==== MPI Configuration ====
#
# For many users, MPI support can be enabled simply by adding the following
# line to your user-config.jam file:
#
#   using mpi ;
#
# This should auto-detect MPI settings based on the MPI wrapper compiler in
# your path, e.g., "mpic++". If the wrapper compiler is not in your path, or
# has a different name, you can pass the name of the wrapper compiler as the
# first argument to the mpi module:
#
#   using mpi : /opt/mpich2-1.0.4/bin/mpiCC ;
#
# If your MPI implementation does not have a wrapper compiler, or the MPI
# auto-detection code does not work with your MPI's wrapper compiler,
# you can pass MPI-related options explicitly via the second parameter to the
# mpi module:
#
#   using mpi : : <find-shared-library>lammpio <find-shared-library>lammpi++
#                 <find-shared-library>mpi <find-shared-library>lam
#                 <find-shared-library>dl ;
#
# To see the results of MPI auto-detection, pass "--debug-configuration" on
# the bjam command line.
#
# The (optional) third argument configures Boost.MPI for running regression
# tests. These parameters specify the executable used to launch jobs
# (default: "mpirun"), followed by any arguments needed to run tests and to
# tell the program how many processors to expect (default: "-np"). With the
# default parameters, for instance, the test harness will execute, e.g.,
#
#   mpirun -np 4 all_gather_test
#
# ==== Linking Against the MPI Libraries ====
#
# To link against the MPI libraries, import the "mpi" module and add the
# following requirement to your target:
#
#   <library>/mpi//mpi
#
# Since MPI support is not always available, you should check
# "mpi.configured" before trying to link against the MPI libraries.

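# A minimal sketch of the linking pattern described above (not part of this
# module); the "hello_mpi" target and source names are hypothetical:
#
#   import mpi ;
#   if [ mpi.configured ]
#   {
#       exe hello_mpi : hello_mpi.cpp : <library>/mpi//mpi ;
#   }
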
import "class" : new ;
import common ;
import feature : feature ;
import generators ;
import project ;
import property ;
import testing ;
import toolset ;
import type ;

# Make this module a project
project.initialize $(__name__) ;
project mpi ;

if [ MATCH (--debug-configuration) : [ modules.peek : ARGV ] ]
{
    .debug-configuration = true ;
}

# Assuming the first part of the command line is the given prefix
# followed by some non-empty value, remove the first argument. Returns
# either nothing (if there was no prefix or no value) or a pair
#
#   <name>value rest-of-cmdline
#
# This is a subroutine of cmdline_to_features
rule add_feature ( prefix name cmdline )
{
    local match = [ MATCH "^$(prefix)([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;

    # If there was no value associated with the prefix, abort
    if ! $(match) {
        return ;
    }

    local value = $(match[1]) ;

    if [ MATCH " +" : $(value) ] {
        value = "\"$(value)\"" ;
    }

    return "<$(name)>$(value)" $(match[2]) ;
}

# Strip any end-of-line characters off the given string and return the
# result.
rule strip-eol ( string )
{
    local match = [ MATCH "^(([A-Za-z0-9~`\.!@#$%^&*()_+={};:'\",.<>/?\\| -]|[|])*).*$" : $(string) ] ;

    if $(match)
    {
        return $(match[1]) ;
    }
    else
    {
        return $(string) ;
    }
}

# Split a command-line into a set of features. Certain kinds of
# compiler flags are recognized (e.g., -I, -D, -L, -l) and replaced
# with their Boost.Build equivalents (e.g., <include>, <define>,
# <library-path>, <find-library>). All other arguments are introduced
# using the features in the unknown-features parameter, because we
# don't know how to deal with them. The incoming command line should be
# a string starting with an executable (e.g., "g++ -I/include/path") and
# may contain any number of command-line arguments thereafter. The
# result is a list of features corresponding to the given command line,
# ignoring the executable.
rule cmdline_to_features ( cmdline : unknown-features ? )
{
    local executable ;
    local features ;
    local otherflags ;
    local result ;

    unknown-features ?= <cxxflags> <linkflags> ;

    # Pull the executable out of the command line. At this point, the
    # executable is just thrown away.
    local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
    executable = $(match[1]) ;
    cmdline = $(match[2]) ;

    # List the prefix/feature pairs that we will be able to transform.
    # Every kind of parameter not mentioned here will be placed in both
    # cxxflags and linkflags, because we don't know where they should go.
    local feature_kinds-D = "define" ;
    local feature_kinds-I = "include" ;
    local feature_kinds-L = "library-path" ;
    local feature_kinds-l = "find-shared-library" ;

    while $(cmdline) {

        # Check for one of the feature prefixes we know about. If we
        # find one (and the associated value is nonempty), convert it
        # into a feature.
        local match = [ MATCH "^(-.)(.*)" : $(cmdline) ] ;
        local matched ;
        if $(match) && $(match[2]) {
            local prefix = $(match[1]) ;
            if $(feature_kinds$(prefix)) {
                local name = $(feature_kinds$(prefix)) ;
                local add = [ add_feature $(prefix) $(name) $(cmdline) ] ;

                if $(add) {
                    result += $(add[1]) ;
                    cmdline = $(add[2]) ;
                    matched = yes ;
                }
            }
        }

        # If we haven't matched a feature prefix, just grab the command-line
        # argument itself. If we can map this argument to a feature
        # (e.g., -pthread -> <threading>multi), then do so; otherwise,
        # add it to the list of "other" flags that we don't understand.
        if ! $(matched) {
            match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$" : $(cmdline) ] ;
            local value = $(match[1]) ;
            cmdline = $(match[2]) ;

            # Check for multithreading support
            if $(value) = "-pthread" || $(value) = "-pthreads"
            {
                result += "<threading>multi" ;

                # DPG: This is a hack intended to work around a BBv2 bug where
                # requirements propagated from libraries are not checked for
                # conflicts when BBv2 determines which "common" properties to
                # apply to a target. In our case, the <threading>single property
                # gets propagated from the common properties to Boost.MPI
                # targets, even though <threading>multi is in the usage
                # requirements of <library>/mpi//mpi.
                MPI_EXTRA_REQUIREMENTS += "<threading>multi" ;
            }
            else if [ MATCH "(.*[a-zA-Z0-9<>?-].*)" : $(value) ] {
                otherflags += $(value) ;
            }
        }
    }

    # If there are other flags that we don't understand, add them to the
    # result as both <cxxflags> and <linkflags>
    if $(otherflags) {
        for unknown in $(unknown-features)
        {
            result += "$(unknown)$(otherflags)" ;
        }
    }

    return $(result) ;
}

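# An illustrative (hypothetical) example of the transformation performed by
# cmdline_to_features: given the command line
#
#   "mpic++ -I/opt/mpi/include -L/opt/mpi/lib -lmpi -pthread"
#
# the executable is discarded and the result is roughly
#
#   <include>/opt/mpi/include <library-path>/opt/mpi/lib
#   <find-shared-library>mpi <threading>multi
#
# with any flags that are not recognized duplicated into <cxxflags> and
# <linkflags>.
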
# Determine if it is safe to execute the given shell command by trying
# to execute it and determining whether the exit code is zero or
# not. Returns true for an exit code of zero, false otherwise.
local rule safe-shell-command ( cmdline )
{
    local result = [ SHELL "$(cmdline) > /dev/null 2>/dev/null; if [ "$?" -eq "0" ]; then echo SSCOK; fi" ] ;
    return [ MATCH ".*(SSCOK).*" : $(result) ] ;
}

# Initialize the MPI module.
rule init ( mpicxx ? : options * : mpirun-with-options * )
{
    if ! $(options)
    {
        if $(.debug-configuration)
        {
            ECHO "===============MPI Auto-configuration===============" ;
        }

        # Try to auto-detect options based on the wrapper compiler
        local command = [ common.get-invocation-command mpi : mpic++ : $(mpicxx) ] ;

        if ! $(mpicxx) && ! $(command)
        {
            # Try "mpiCC", which is used by MPICH
            command = [ common.get-invocation-command mpi : mpiCC ] ;
        }

        local result ;
        local compile_flags ;
        local link_flags ;

        if ! $(command)
        {
            # Do nothing: we'll complain later
        }
        # OpenMPI and newer versions of LAM-MPI have -showme:compile and
        # -showme:link.
        else if [ safe-shell-command "$(command) -showme:compile" ] &&
            [ safe-shell-command "$(command) -showme:link" ]
        {
            if $(.debug-configuration)
            {
                ECHO "Found recent LAM-MPI or Open MPI wrapper compiler: $(command)" ;
            }

            compile_flags = [ SHELL "$(command) -showme:compile" ] ;
            link_flags = [ SHELL "$(command) -showme:link" ] ;

            # Prepend COMPILER as the executable name, to match the format of
            # other compilation commands.
            compile_flags = "COMPILER $(compile_flags)" ;
            link_flags = "COMPILER $(link_flags)" ;
        }
        # Look for LAM-MPI's -showme
        else if [ safe-shell-command "$(command) -showme" ]
        {
            if $(.debug-configuration)
            {
                ECHO "Found older LAM-MPI wrapper compiler: $(command)" ;
            }

            result = [ SHELL "$(command) -showme" ] ;
        }
        # Look for MPICH
        else if [ safe-shell-command "$(command) -show" ]
        {
            if $(.debug-configuration)
            {
                ECHO "Found MPICH wrapper compiler: $(command)" ;
            }
            compile_flags = [ SHELL "$(command) -compile_info" ] ;
            link_flags = [ SHELL "$(command) -link_info" ] ;
        }

        if $(result) || $(compile_flags) && $(link_flags)
        {
            if $(result)
            {
                result = [ strip-eol $(result) ] ;
                options = [ cmdline_to_features $(result) ] ;
            }
            else
            {
                compile_flags = [ strip-eol $(compile_flags) ] ;
                link_flags = [ strip-eol $(link_flags) ] ;

                # Separately process compilation and link features, then combine
                # them at the end.
                local compile_features = [ cmdline_to_features $(compile_flags)
                    : "<cxxflags>" ] ;
                local link_features = [ cmdline_to_features $(link_flags)
                    : "<linkflags>" ] ;
                options = $(compile_features) $(link_features) ;
            }

            # If requested, display MPI configuration information.
            if $(.debug-configuration)
            {
                if $(result)
                {
                    ECHO " Wrapper compiler command line: $(result)" ;
                }
                else
                {
                    local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
                        : $(compile_flags) ] ;
                    ECHO "MPI compilation flags: $(match[2])" ;
                    local match = [ MATCH "^([^\" ]+|\"[^\"]+\") *(.*)$"
                        : $(link_flags) ] ;
                    ECHO "MPI link flags: $(match[2])" ;
                }
                ECHO "MPI build features: " ;
                ECHO $(options) ;
            }
        }
        else
        {
            if $(command)
            {
                ECHO "MPI auto-detection failed: unknown wrapper compiler $(command)" ;
                ECHO "Please report this error to the Boost mailing list: http://www.boost.org" ;
            }
            else if $(mpicxx)
            {
                ECHO "MPI auto-detection failed: unable to find wrapper compiler $(mpicxx)" ;
            }
            else
            {
                ECHO "MPI auto-detection failed: unable to find wrapper compiler `mpic++' or `mpiCC'" ;
            }
            ECHO "You will need to manually configure MPI support." ;
        }

        if $(.debug-configuration)
        {
            ECHO "====================================================" ;
        }
    }

    # Find mpirun (or its equivalent) and its flags
    .mpirun =
        [ common.get-invocation-command mpi : mpirun : $(mpirun-with-options[1]) ] ;
    .mpirun_flags = $(mpirun-with-options[2-]) ;
    .mpirun_flags ?= -np ;

    if $(options)
    {
        .configured = true ;

        # Set up the "mpi" alias
        alias mpi : : : : $(options) ;
    }
}

# States whether MPI has been configured
rule configured ( )
{
    return $(.configured) ;
}

# Returns the "extra" requirements needed to build MPI. These requirements are
# part of the /mpi//mpi library target, but they need to be added to anything
# that uses MPI directly to work around bugs in BBv2's propagation of
# requirements.
rule extra-requirements ( )
{
    return $(MPI_EXTRA_REQUIREMENTS) ;
}

# Support for testing; borrowed from Python
type.register RUN_MPI_OUTPUT ;
type.register RUN_MPI : : TEST ;

class mpi-test-generator : generator
{
    import property-set ;

    rule __init__ ( * : * )
    {
        generator.__init__ $(1) : $(2) : $(3) : $(4) : $(5) : $(6) : $(7) : $(8) : $(9) ;
        self.composing = true ;
    }

    rule run ( project name ? : property-set : sources * : multiple ? )
    {
        # Generate an executable from the sources. This is the executable we will run.
        local executable =
            [ generators.construct $(project) $(name) : EXE : $(property-set) : $(sources) ] ;

        result =
            [ construct-result $(executable[2-]) : $(project) $(name)-run : $(property-set) ] ;
    }
}

# Use mpi-test-generator to generate MPI tests from sources
generators.register
    [ new mpi-test-generator mpi.capture-output : : RUN_MPI_OUTPUT ] ;

generators.register-standard testing.expect-success
    : RUN_MPI_OUTPUT : RUN_MPI ;

# The number of processes to spawn when executing an MPI test.
feature mpi:processes : : free incidental ;

# The flag settings on testing.capture-output do not
# apply to mpi.capture-output at the moment.
# Redo this explicitly.
toolset.flags mpi.capture-output ARGS <testing.arg> ;
rule capture-output ( target : sources * : properties * )
{
    # Use the standard capture-output rule to run the tests
    testing.capture-output $(target) : $(sources[1]) : $(properties) ;

    # Determine the number of processes we should run on.
    local num_processes = [ property.select <mpi:processes> : $(properties) ] ;
    num_processes = $(num_processes:G=) ;

    # We launch MPI processes using the "mpirun" equivalent specified by the user.
    LAUNCHER on $(target) =
        [ on $(target) return $(.mpirun) $(.mpirun_flags) $(num_processes) ] ;
}

# Creates a set of test cases to be run through the MPI launcher. The name, sources,
# and requirements are the same as for any other test generator. However, schedule is
# a list of numbers, which indicates how many processes each test run will use. For
# example, passing 1 2 7 will run the test with 1 process, then 2 processes, and then
# 7 processes. The name provided is just the base name: the actual tests will be
# the name followed by a hyphen, then the number of processes.
rule mpi-test ( name : sources * : requirements * : schedule * )
{
    sources ?= $(name).cpp ;
    schedule ?= 1 2 3 4 7 8 13 17 ;

    local result ;
    for processes in $(schedule)
    {
        result += [ testing.make-test
            run-mpi : $(sources) /boost/mpi//boost_mpi
            : $(requirements) <mpi:processes>$(processes) : $(name)-$(processes) ] ;
    }
    return $(result) ;
}
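
# A usage sketch for the testing support above (not part of this module); the
# test name is taken from the example in the configuration comments. Each
# mpi-test expands into one run per entry in the schedule, executed under the
# configured MPI launcher and linked against /boost/mpi//boost_mpi:
#
#   import mpi ;
#   mpi-test all_gather_test : all_gather_test.cpp : : 1 2 4 ;
#
# This declares the targets all_gather_test-1, all_gather_test-2 and
# all_gather_test-4.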