Lots of cleanup of obsolete files and updates to the build scripts (and sources) to make the tools easier to build and use. Still in progress.

* Added build options and logic to set boost-root, boost-build, and the install location.
* Added a general build script to install all the various testing utility programs.
* Refactored process_jam_log so that it can be shared and incorporated directly into other programs (e.g. library_status).
* Redoing library_status to be a single program run that does the complete build+process+generate-data cycle for a library (see the usage sketch below the commit details).
Rene Rivera
2015-04-07 10:26:30 -05:00
parent 29c8ce2d63
commit 4e68ae0d6c
156 changed files with 265 additions and 22224 deletions
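As a rough sketch of the workflow these changes are heading toward, assuming the installed tools are on PATH (the library name, toolset, and paths below are placeholders, not taken from this commit): a library's tests are run under b2 with --dump-tests, and the resulting bjam log is fed to the single library_status run, which processes the log and writes the HTML status pages.

    # Illustrative sketch only: library name, toolset, and paths are placeholders.
    cd "$BOOST_ROOT/libs/somelib/test"
    # Run the tests with --dump-tests and pipe the bjam log (standard input is the
    # default input) into one library_status run, which both processes the log and
    # writes library_status.html / library_status_links.html in this directory.
    b2 toolset=gcc --dump-tests 2>&1 | library_status --locate-root "$BOOST_ROOT"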

.gitignore (vendored): 1 line changed
View File

@@ -1 +1,2 @@
/bin/
/stage/

View File

@@ -1,11 +1,14 @@
# Copyright Rene Rivera 2015
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
import modules ;
use-project /boost : [ MATCH --boost-root=(.*) : [ modules.peek : ARGV ] ] ;
path-constant BOOST_ROOT : [ modules.peek : BOOST_ROOT ] ;
path-constant INSTALL_PREFIX : [ modules.peek : INSTALL_PREFIX ] ;
path-constant INSTALL_PREFIX_EXEC : [ modules.peek : INSTALL_PREFIX_EXEC ] ;
use-project /boost : $(BOOST_ROOT) ;
project boost_regression
:

View File

@@ -1,5 +1,57 @@
# Copyright Rene Rivera 2014
# Copyright Rene Rivera 2014-2015
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
boost-build [ MATCH --boost-build=(.*) : $(ARGV) ] ;
rule get-dir-option ( option )
{
local path
= [ MATCH $(option)=(.*) : $(ARGV) ] ;
local is-rooted
= [ MATCH "^([/]+)" : $(path) ] [ MATCH "^(.:\\[^\\])" : $(path) ] ;
if ! $(is-rooted)
{
local pwd = [ PWD ] ;
path = $(pwd)/$(path) ;
}
# ECHO @@@ $(option) ==> $(path) ;
return $(path) ;
}
BOOST_ROOT ?= [ get-dir-option --boost-root ] ;
BOOST_BUILD ?= [ get-dir-option --boost-build ] ;
BOOST_BUILD ?= $(BOOST_ROOT)/tools/build/src ;
INSTALL_PREFIX ?= $(PREFIX) ;
INSTALL_PREFIX ?= [ get-dir-option --prefix ] ;
INSTALL_PREFIX ?= $(.boost-build-file:D)/stage ;
INSTALL_PREFIX_EXEC ?= $(EPREFIX) ;
INSTALL_PREFIX_EXEC ?= [ get-dir-option --exec-prefix ] ;
INSTALL_PREFIX_EXEC ?= $(INSTALL_PREFIX)/bin ;
local error ;
local help ;
if ( --help in $(ARGV) ) || ( -h in $(ARGV) )
{
help = YES ;
}
if ! $(error) && ! $(help) && ! $(BOOST_ROOT)
{
error = "Boost root not specified or found." ;
}
if ! $(error) && ! $(help) && ! $(BOOST_BUILD)
{
error = "Boost Build not specified or found." ;
}
if $(error) || $(help)
{
EXIT
"\nERROR: $(error)\n"
"\nUsage:"
"b2"
"--boost-root=<BOOST_ROOT>"
"[ --boost-build=<BOOST_BUILD> ]"
"[ --prefix=<PREFIX> ]"
"[ --exec-prefix=<EPREFIX> ]"
"\n" ;
}
boost-build $(BOOST_BUILD) ;
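For context, the options handled above are intended to be passed on the b2 command line; a hypothetical invocation (the paths and the install target are assumptions, not prescribed by this file) could look like:

    # Illustrative only: point the build at a Boost tree and an install prefix.
    b2 --boost-root=/path/to/boost-root \
       --prefix=/path/to/install/prefix \
       install

As the fallbacks above show, --boost-build defaults to $(BOOST_ROOT)/tools/build/src; a previously set PREFIX / EPREFIX wins over --prefix / --exec-prefix, and otherwise the prefixes fall back to a stage/ directory next to this file and <prefix>/bin.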

build/Jamfile.jam (new file): 12 lines added
View File

@@ -0,0 +1,12 @@
# Regression test status reporting tools build Jamfile
# Copyright Rene Rivera 2015
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
alias install
:
../library_status/build//install
../reports/build//install
../testing/build//install
;

View File

@@ -1,68 +0,0 @@
# Regression test status reporting tools build Jamfile
# Copyright Rene Rivera 2014
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
import modules ;
use-project /boost : [ MATCH --boost-root=(.*) : [ modules.peek : ARGV ] ] ;
project boost_regression
:
source-location ../src
:
requirements
<dependency>/boost//headers
;
obj tiny_xml
:
detail/tiny_xml.cpp
:
<define>BOOST_ALL_NO_LIB=1
<define>_CRT_SECURE_NO_WARNINGS
<implicit-dependency>/boost//headers
:
release
;
explicit tiny_xml ;
exe process_jam_log
:
process_jam_log.cpp
tiny_xml
/boost/filesystem//boost_filesystem/<link>static
:
<define>BOOST_ALL_NO_LIB=1
<define>_CRT_SECURE_NO_WARNINGS
<implicit-dependency>/boost//headers
:
release
;
#~ explicit process_jam_log ;
exe boost_report
:
[ glob report/*.cpp ]
/boost/filesystem//boost_filesystem/<link>static
/boost//filesystem/<link>static
/boost//date_time/<link>static
/boost//regex/<link>static
/boost//program_options/<link>static
/boost//iostreams/<link>static
:
<define>BOOST_ALL_NO_LIB=1
<implicit-dependency>/boost//headers
:
release
;
explicit boost_report ;
alias install : bin ;
install bin : process_jam_log/<variant>release ;
explicit install bin ;
install bin_boost_report : boost_report/<variant>release ;
explicit bin_boost_report ;

View File

@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ImportGroup Label="PropertySheets" />
<PropertyGroup Label="UserMacros" />
<PropertyGroup />
<ItemDefinitionGroup>
<ClCompile>
<AdditionalIncludeDirectories>../../../../..</AdditionalIncludeDirectories>
<PreprocessorDefinitions>BOOST_ALL_STATIC_LINK;BOOST_LIGHTWEIGHT_TEST_OSTREAM=std::cout;BOOST_ASSERT_MSG_OSTREAM=std::cout;_UNICODE;UNICODE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ExceptionHandling>Async</ExceptionHandling>
</ClCompile>
<Link>
<AdditionalLibraryDirectories>C:\boost\modular-boost\stage\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
</ItemDefinitionGroup>
<ItemGroup />
</Project>

View File

@@ -1,87 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{56EFEFAD-F90F-410A-99AF-18522AF8D669}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
<RootNamespace>compiler_status</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\common.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\common.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<LinkIncremental>true</LinkIncremental>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<LinkIncremental>false</LinkIncremental>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>
</PrecompiledHeader>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="..\..\..\src\compiler_status.cpp" />
<ClCompile Include="..\..\..\src\detail\tiny_xml.cpp" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@@ -1,89 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{D54B9FB9-88C1-4D69-A6BD-0790793B30FC}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
<RootNamespace>process_jam_log</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<PlatformToolset>v120</PlatformToolset>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\common.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\common.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<LinkIncremental>true</LinkIncremental>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<LinkIncremental>false</LinkIncremental>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<SDLCheck>true</SDLCheck>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>
</PrecompiledHeader>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<SDLCheck>true</SDLCheck>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="..\..\..\src\detail\tiny_xml.cpp" />
<ClCompile Include="..\..\..\src\process_jam_log.cpp" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@@ -1,28 +0,0 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Express 2013 for Windows Desktop
VisualStudioVersion = 12.0.21005.1
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "compiler_status", "compiler_status\compiler_status.vcxproj", "{56EFEFAD-F90F-410A-99AF-18522AF8D669}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "process_jam_log", "process_jam_log\process_jam_log.vcxproj", "{D54B9FB9-88C1-4D69-A6BD-0790793B30FC}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Win32 = Debug|Win32
Release|Win32 = Release|Win32
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{56EFEFAD-F90F-410A-99AF-18522AF8D669}.Debug|Win32.ActiveCfg = Debug|Win32
{56EFEFAD-F90F-410A-99AF-18522AF8D669}.Debug|Win32.Build.0 = Debug|Win32
{56EFEFAD-F90F-410A-99AF-18522AF8D669}.Release|Win32.ActiveCfg = Release|Win32
{56EFEFAD-F90F-410A-99AF-18522AF8D669}.Release|Win32.Build.0 = Release|Win32
{D54B9FB9-88C1-4D69-A6BD-0790793B30FC}.Debug|Win32.ActiveCfg = Debug|Win32
{D54B9FB9-88C1-4D69-A6BD-0790793B30FC}.Debug|Win32.Build.0 = Debug|Win32
{D54B9FB9-88C1-4D69-A6BD-0790793B30FC}.Release|Win32.ActiveCfg = Release|Win32
{D54B9FB9-88C1-4D69-A6BD-0790793B30FC}.Release|Win32.Build.0 = Release|Win32
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal

View File

@@ -5,17 +5,30 @@
project common
:
: requirements
<define>BOOST_ALL_NO_LIB=1
<define>_CRT_SECURE_NO_WARNINGS
<implicit-dependency>/boost//headers
: usage-requirements
<define>BOOST_ALL_NO_LIB=1
<define>_CRT_SECURE_NO_WARNINGS
<implicit-dependency>/boost//headers
<include>../src
;
obj tiny_xml
:
../src/tiny_xml.cpp
:
<define>BOOST_ALL_NO_LIB=1
<define>_CRT_SECURE_NO_WARNINGS
<implicit-dependency>/boost//headers
;
explicit tiny_xml ;
obj process_jam_log
:
../src/process_jam_log.cpp
:
:
:
<source>tiny_xml
<library>/boost/filesystem//boost_filesystem/<link>static
;
explicit process_jam_log ;

View File

@@ -1,6 +1,8 @@
// process jam regression test output into XML -----------------------------//
// Copyright Beman Dawes 2002. Distributed under the Boost
// Copyright Beman Dawes 2002.
// Copyright Rene Rivera 2015.
// Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
@@ -10,8 +12,8 @@
#include <boost/config/warning_disable.hpp>
#include "detail/tiny_xml.hpp"
#include "detail/common.hpp"
#include "tiny_xml.hpp"
#include "common.hpp"
#include "boost/filesystem/operations.hpp"
#include "boost/filesystem/fstream.hpp"
#include "boost/filesystem/exception.hpp"
@@ -25,6 +27,7 @@
#include <ctime>
#include <cctype> // for tolower
#include <cstdlib> // for exit
#include <vector>
using std::string;
namespace xml = boost::tiny_xml;
@@ -600,7 +603,7 @@ namespace
// main --------------------------------------------------------------------//
int main( int argc, char ** argv )
int process_jam_log( const std::vector<std::string> & args )
{
// Turn off synchronization with corresponding C standard library files. This
// gives a significant speed improvement on platforms where the standard C++
@@ -610,93 +613,75 @@ int main( int argc, char ** argv )
fs::initial_path();
std::istream* input = 0;
if ( argc <= 1 )
if ( args.size() <= 1 )
{
std::cout << "process_jam_log [--echo] [--create-directories] [--v1|--v2]\n"
" [--boost-root boost_root] [--locate-root locate_root]\n"
" [--input-file input_file]\n"
" [locate-root]\n"
"--echo - verbose diagnostic output.\n"
"--create-directories - if the directory for xml file doesn't exists - creates it.\n"
" usually used for processing logfile on different machine\n"
"--v2 - bjam version 2 used (default).\n"
"--v1 - bjam version 1 used.\n"
"--boost-root - the root of the boost installation being used. If not defined\n"
" assume to run from within it and discover it heuristically.\n"
"--locate-root - the same as the bjam ALL_LOCATE_TARGET\n"
" parameter, if any. Default is boost-root.\n"
"--input-file - the output of a bjam --dump-tests run. Default is std input.\n"
;
std::cout <<
"process_jam_log [options...]\n"
" options:\n"
" --echo - verbose diagnostic output.\n"
" --create-directories - if the directory for xml file doesn't exists - creates it.\n"
" usually used for processing logfile on different machine\n"
" --boost-root path - the root of the boost installation being used. If not defined\n"
" assume to run from within it and discover it heuristically.\n"
" --locate-root path - the same as the bjam ALL_LOCATE_TARGET\n"
" parameter, if any. Default is boost-root.\n"
" --input-file path - the output of a bjam --dump-tests run. Default is std input.\n"
;
return 1;
}
while ( argc > 1 )
std::vector<std::string>::const_iterator args_i = args.begin();
std::vector<std::string>::const_iterator args_e = args.end();
for(++args_i; args_i < args_e; ++args_i)
{
if ( std::strcmp( argv[1], "--echo" ) == 0 )
if ( *args_i == "--echo" )
{
echo = true;
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--create-directories" ) == 0 )
else if ( *args_i == "--create-directories" )
{
create_dirs = true;
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--v2" ) == 0 )
else if ( *args_i == "--boost-root" )
{
boost_build_v2 = true;
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--v1" ) == 0 )
{
boost_build_v2 = false;
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--boost-root" ) == 0 )
{
--argc; ++argv;
if ( argc == 1 )
++args_i;
if ( args_i == args_e )
{
std::cout << "Abort: option --boost-root requires a directory argument\n";
std::exit(1);
}
boost_root = fs::path( argv[1] );
boost_root = fs::path( *args_i );
if ( !boost_root.is_complete() )
boost_root = ( fs::initial_path() / boost_root ).normalize();
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--locate-root" ) == 0 )
else if ( *args_i == "--locate-root" )
{
--argc; ++argv;
if ( argc == 1 )
++args_i;
if ( args_i == args_e )
{
std::cout << "Abort: option --locate-root requires a directory argument\n";
std::exit(1);
}
locate_root = fs::path( argv[1] );
--argc; ++argv;
locate_root = fs::path( *args_i );
}
else if ( std::strcmp( argv[1], "--input-file" ) == 0 )
else if ( *args_i == "--input-file" )
{
--argc; ++argv;
if ( argc == 1 )
++args_i;
if ( args_i == args_e )
{
std::cout << "Abort: option --input-file requires a filename argument\n";
std::exit(1);
}
input = new std::ifstream(argv[1]);
--argc; ++argv;
input = new std::ifstream( *args_i );
}
else if ( *argv[1] == '-' )
else if ( (*args_i)[1] == '-' )
{
std::cout << "Abort: unknown option; invoke with no arguments to see list of valid options\n";
std::cout << "Abort: unknown option '" << *args_i << "'; invoke with no arguments to see list of valid options\n";
return 1;
}
else
{
locate_root = fs::path( argv[1] );
--argc; ++argv;
locate_root = fs::path( *args_i );
}
}

View File

@@ -1 +0,0 @@
/bin/

View File

@@ -1,20 +0,0 @@
# Copyright Rene Rivera 2015
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
exe compiler_status
:
../src/compiler_status.cpp
../../common/build//tiny_xml
/boost/filesystem//boost_filesystem/<link>static
:
<define>BOOST_ALL_NO_LIB=1
<implicit-dependency>/boost//headers
;
explicit compiler_status ;
alias install : bin ;
install bin : compiler_status/<variant>release ;
explicit install bin ;

File diff suppressed because it is too large

View File

@@ -7,6 +7,7 @@
exe library_status
:
../src/library_status.cpp
../../common/build//process_jam_log
../../common/build//tiny_xml
/boost/filesystem//boost_filesystem/<link>static
:
@@ -15,6 +16,12 @@ exe library_status
;
explicit library_status ;
alias install : bin ;
install bin : library_status/<variant>release ;
explicit install bin ;
alias install : exec ;
install exec
:
library_status/<variant>release
:
<install-type>EXE
<location>$(INSTALL_PREFIX_EXEC)
;
explicit install exec ;

View File

@@ -51,6 +51,8 @@ namespace xml = boost::tiny_xml;
using std::string;
extern int process_jam_log( const std::vector<std::string> & args );
const string pass_msg( "Pass" );
const string warn_msg( "<i>Warn</i>" );
const string fail_msg( "<font color=\"#FF0000\"><i>Fail</i></font>" );
@@ -754,44 +756,48 @@ namespace
}
}// unnamed namespace
// main --------------------------------------------------------------------//
#define BOOST_NO_CPP_MAIN_SUCCESS_MESSAGE
#include <boost/test/included/prg_exec_monitor.hpp>
int cpp_main( int argc, char * argv[] ) // note name!
int library_status( const std::vector<std::string> & args )
{
fs::path initial_path = fs::initial_path();
fs::path status_file_path("library_status.html");
fs::path links_file_path("library_status_links.html");
while ( argc > 1 && *argv[1] == '-' )
bool args_error = false;
std::vector<std::string>::const_iterator args_i = args.begin();
std::vector<std::string>::const_iterator args_e = args.end();
for(++args_i; args_i < args_e && !args_error; ++args_i)
{
if ( argc > 2 && std::strcmp( argv[1], "--compiler" ) == 0 )
{ specific_compiler = argv[2]; --argc; ++argv; }
else if ( argc > 2 && std::strcmp( argv[1], "--locate-root" ) == 0 )
{ locate_root = fs::path( argv[2] ); --argc; ++argv; }
else if ( std::strcmp( argv[1], "--ignore-pass" ) == 0 ) ignore_pass = true;
else if ( std::strcmp( argv[1], "--no-warn" ) == 0 ) no_warn = true;
else if ( std::strcmp( argv[1], "--v2" ) == 0 )
{--argc; ++argv ;} // skip
else if ( argc > 2 && std::strcmp( argv[1], "--jamfile" ) == 0)
{--argc; ++argv;} // skip
else { std::cerr << "Unknown option: " << argv[1] << "\n"; argc = 1; }
--argc;
++argv;
if ( *args_i == "--compiler" )
{ ++args_i; specific_compiler = *args_i; }
else if ( *args_i == "--ignore-pass" ) ignore_pass = true;
else if ( *args_i == "--no-warn" ) no_warn = true;
else if ( *args_i == "--locate-root" )
{ ++args_i; locate_root = fs::path( *args_i ); }
else if ( *args_i == "--status-file" )
{ ++args_i; status_file_path = fs::path( *args_i ); }
else if ( *args_i == "--links-file" )
{ ++args_i; links_file_path = fs::path( *args_i ); }
else
{ std::cerr << "Unknown option: " << *args_i << "\n"; args_error = true ; }
}
if ( argc != 2 && argc != 3 )
if ( args_error )
{
std::cerr <<
"Usage: library_status [options...] status-file [links-file]\n"
"Usage: library_status [options...]\n"
" boost-root is the path to the boost tree root directory.\n"
" status-file and links-file are paths to the output files.\n"
" options: --compiler name Run for named compiler only\n"
" --ignore-pass Do not report tests which pass all compilers\n"
" --no-warn Warnings not reported if test passes\n"
" --locate-root path Path to ALL_LOCATE_TARGET for bjam;\n"
" default boost-root.\n"
"Example: library_status --compiler gcc /boost-root cs.html cs-links.html\n"
" options:\n"
" --compiler name Run for named compiler only\n"
" --ignore-pass Do not report tests which pass all compilers\n"
" --no-warn Warnings not reported if test passes\n"
" --locate-root path Path to ALL_LOCATE_TARGET for bjam;\n"
" default boost-root.\n"
" --status-file path Path to output html file;\n"
" default library_status.html.\n"
" --links-file path Path to output html file;\n"
" default library_status_links.html.\n"
"Example: library_status --compiler gcc --locate-root /boost-root\n"
"Note: Only the leaf of the links-file path is\n"
"used in status-file HTML links. Thus for browsing, status-file,\n"
"links-file must be in the same directory.\n"
@@ -803,25 +809,20 @@ int cpp_main( int argc, char * argv[] ) // note name!
if(! fs::exists("bin") && ! fs::exists("bin.v2"))
locate_root = find_boost_root(initial_path);
report.open( fs::path( argv[1] ) );
report.open( status_file_path );
if ( !report )
{
std::cerr << "Could not open report output file: " << argv[2] << std::endl;
std::cerr << "Could not open report output file: " << status_file_path << std::endl;
return 1;
}
if ( argc == 3 )
links_name = links_file_path.filename().string();
links_file.open( links_file_path );
if ( !links_file )
{
fs::path links_path( argv[2] );
links_name = links_path.filename().string();
links_file.open( links_path );
if ( !links_file )
{
std::cerr << "Could not open links output file: " << argv[3] << std::endl;
return 1;
}
std::cerr << "Could not open links output file: " << links_file_path << std::endl;
return 1;
}
else no_links = true;
const string library_name = find_lib_name(initial_path);
@@ -888,3 +889,58 @@ int cpp_main( int argc, char * argv[] ) // note name!
}
return 0;
}
// main --------------------------------------------------------------------//
#define BOOST_NO_CPP_MAIN_SUCCESS_MESSAGE
#include <boost/test/included/prg_exec_monitor.hpp>
int cpp_main( int argc, char * argv[] ) // note name!
{
std::vector<std::string> process_jam_log_args;
std::vector<std::string> library_status_args;
process_jam_log_args.push_back("process_jam_log");
library_status_args.push_back("library_status");
--argc; ++argv;
while ( argc > 0 )
{
std::string arg = *argv;
if (arg == "--compiler" ||
arg == "--status-file" ||
arg == "--links-file")
{
library_status_args.push_back(*argv);
--argc; ++argv;
library_status_args.push_back(*argv);
--argc; ++argv;
}
else if (arg == "--ignore-pass" ||
arg == "--no-warn")
{
library_status_args.push_back(*argv);
--argc; ++argv;
}
else if (arg == "--locate-root")
{
process_jam_log_args.push_back(*argv);
library_status_args.push_back(*argv);
--argc; ++argv;
}
else if (arg == "--echo" ||
arg == "--create-directories")
{
process_jam_log_args.push_back(*argv);
--argc; ++argv;
}
else if (arg == "--boost-root" ||
arg == "--input-file")
{
process_jam_log_args.push_back(*argv);
--argc; ++argv;
}
}
int result = 0;
if (!result) result = process_jam_log(process_jam_log_args);
if (!result) result = library_status(library_status_args);
return result;
}
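The cpp_main dispatcher above routes --echo, --create-directories, --boost-root, and --input-file to the log-processing phase, --compiler, --status-file, --links-file, --ignore-pass, and --no-warn to the report phase, and --locate-root to both, so a combined invocation could look like the following sketch (file names and paths are placeholders):

    # Illustrative only: process a saved bjam log and generate the HTML report
    # in a single library_status run.
    library_status --create-directories \
        --input-file bjam.log \
        --locate-root /path/to/boost-root \
        --compiler gcc \
        --status-file library_status.html \
        --links-file library_status_links.html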

View File

@@ -1,85 +0,0 @@
if test $# -eq 0
then
echo "Usage: $0 <bjam arguments>"
echo "Typical bjam arguments are:"
echo " toolset=msvc-7.1,gcc"
echo " variant=debug,release,profile"
echo " link=static,shared"
echo " threading=single,multi"
echo
echo "note: make sure this script is run from boost root directory !!!"
exit 1
fi
if ! test -e libs
then
echo No libs directory found. Run from boost root directory !!!
exit 1
fi
#html header
cat <<end >status/library_status_contents.html
<!doctype HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<!--
(C) Copyright 2007 Robert Ramey - http://www.rrsd.com .
Use, modification and distribution is subject to the Boost Software
License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
-->
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<link rel="stylesheet" type="text/css" href="../boost.css">
<title>Library Status Contents</title>
<body>
end
cd >nul libs
# runtests, create library pages, and body of summary page
for lib_name in *
do
if test -d $lib_name
then
cd >nul $lib_name
if test -e "test/Jamfile.v2"
then
cd >nul test
echo $lib_name
echo >>../../../status/library_status_contents.html "<a target=\"detail\" href=\"../libs/$lib_name/test/library_status.html\">$lib_name</a><br>"
../../../tools/regression/src/library_test.sh $@
cd >nul ..
fi
for sublib_name in *
do
if test -d $sublib_name
then
cd >nul $sublib_name
if test -e "test/Jamfile.v2"
then
cd >nul test
echo $lib_name/$sublib_name
echo >>../../../../status/library_status_contents.html "<a target=\"detail\" href=\"../libs/$lib_name/$sublib_name/test/library_status.html\">$lib_name/$sublib_name</a><br>"
../../../../tools/regression/src/library_test.sh $@
cd >nul ..
fi
cd >nul ..
fi
done
cd >nul ..
fi
done
cd >nul ..
#html trailer
cat <<end >>status/library_status_contents.html
</body>
</html>
end

View File

@@ -19,6 +19,12 @@ exe boost_report
;
explicit boost_report ;
alias install : bin ;
install bin : boost_report/<variant>release ;
explicit install bin ;
alias install : exec ;
install exec
:
boost_report/<variant>release
:
<install-type>EXE
<location>$(INSTALL_PREFIX_EXEC)
;
explicit install exec ;

View File

@@ -1,70 +0,0 @@
#!/bin/sh
#~ Copyright Redshift Software, Inc. 2007
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
export PATH=/bin:/usr/bin:${PATH}
timestamp=`date +%F-%H-%M-%S-%Z`
branch=$1
revision=`svn info file:///home/subversion/boost/${branch} | grep '^Revision:' | cut --byte=11-`
tag=boost-${1/\/*}-${timestamp}
export_dir=boost-$$
# Remove files as listed in stdin, the assumption is that processing
# of the file is complete and can be removed.
rm_c()
{
while read f; do
rm -f ${f}
done
}
# Generate the export file tree, and incrementally output the files
# created.
svn_export()
{
svn export -r ${revision} file:///home/subversion/boost/${branch} ${tag}
echo "Revision: ${revision}" > ${tag}/svn_info.txt
echo "---- ${tag}/svn_info.txt"
}
# Create the archive incrementally, deleting files as we are done
# adding them to the archive.
make_archive()
{
svn_export \
| cut --bytes=6- \
| star -c -D -to-stdout -d artype=pax list=- 2>/dev/null \
| bzip2 -6 -c \
| tee $1 \
| tar -jtf - \
| rm_c
}
run()
{
cd /tmp
rm -rf ${export_dir}
mkdir ${export_dir}
cd ${export_dir}
mkfifo out.tbz2
make_archive out.tbz2 &
cat out.tbz2
cd /tmp
rm -rf ${export_dir}
}
run_debug()
{
rm -rf ${export_dir}
mkdir ${export_dir}
cd ${export_dir}
mkfifo out.tbz2
make_archive out.tbz2 &
cat out.tbz2 > ../${tag}.tar.bz2
cd ..
rm -rf ${export_dir}
}
run
#run_debug

View File

@@ -1,546 +0,0 @@
# Copyright (c) MetaCommunications, Inc. 2003-2007
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import xml.sax.saxutils
import zipfile
import ftplib
import time
import stat
import xml.dom.minidom
import xmlrpclib
import httplib
import os.path
import string
import sys
import re
import urlparse
def process_xml_file( input_file, output_file ):
utils.log( 'Processing test log "%s"' % input_file )
f = open( input_file, 'r' )
xml = f.readlines()
f.close()
for i in range( 0, len(xml)):
xml[i] = string.translate( xml[i], utils.char_translation_table )
output_file.writelines( xml )
def process_test_log_files( output_file, dir, names ):
for file in names:
if os.path.basename( file ) == 'test_log.xml':
process_xml_file( os.path.join( dir, file ), output_file )
def collect_test_logs( input_dirs, test_results_writer ):
__log__ = 1
utils.log( 'Collecting test logs ...' )
for input_dir in input_dirs:
utils.log( 'Walking directory "%s" ...' % input_dir )
os.path.walk( input_dir, process_test_log_files, test_results_writer )
dart_status_from_result = {
'succeed': 'passed',
'fail': 'failed',
'note': 'passed',
'': 'notrun'
}
dart_project = {
'trunk': 'Boost_HEAD',
'': 'Boost_HEAD'
}
dart_track = {
'full': 'Nightly',
'incremental': 'Continuous',
'': 'Experimental'
}
ascii_only_table = ""
for i in range(0,256):
if chr(i) == '\n' or chr(i) == '\r':
ascii_only_table += chr(i)
elif i < 32 or i >= 0x80:
ascii_only_table += '?'
else:
ascii_only_table += chr(i)
class xmlrpcProxyTransport(xmlrpclib.Transport):
def __init__(self, proxy):
self.proxy = proxy
def make_connection(self, host):
self.realhost = host
return httplib.HTTP(self.proxy)
def send_request(self, connection, handler, request_body):
connection.putrequest('POST','http://%s%s' % (self.realhost,handler))
def send_host(self, connection, host):
connection.putheader('Host',self.realhost)
def publish_test_logs(
input_dirs,
runner_id, tag, platform, comment_file, timestamp, user, source, run_type,
dart_server = None,
http_proxy = None,
**unused
):
__log__ = 1
utils.log( 'Publishing test logs ...' )
dart_rpc = None
dart_dom = {}
def _publish_test_log_files_ ( unused, dir, names ):
for file in names:
if os.path.basename( file ) == 'test_log.xml':
utils.log( 'Publishing test log "%s"' % os.path.join(dir,file) )
if dart_server:
log_xml = open(os.path.join(dir,file)).read().translate(ascii_only_table)
#~ utils.log( '--- XML:\n%s' % log_xml)
#~ It seems possible to get an empty XML result file :-(
if log_xml == "": continue
log_dom = xml.dom.minidom.parseString(log_xml)
test = {
'library': log_dom.documentElement.getAttribute('library'),
'test-name': log_dom.documentElement.getAttribute('test-name'),
'toolset': log_dom.documentElement.getAttribute('toolset')
}
if not test['test-name'] or test['test-name'] == '':
test['test-name'] = 'unknown'
if not test['toolset'] or test['toolset'] == '':
test['toolset'] = 'unknown'
if not dart_dom.has_key(test['toolset']):
dart_dom[test['toolset']] = xml.dom.minidom.parseString(
'''<?xml version="1.0" encoding="UTF-8"?>
<DartSubmission version="2.0" createdby="collect_and_upload_logs.py">
<Site>%(site)s</Site>
<BuildName>%(buildname)s</BuildName>
<Track>%(track)s</Track>
<DateTimeStamp>%(datetimestamp)s</DateTimeStamp>
</DartSubmission>
''' % {
'site': runner_id,
'buildname': "%s -- %s (%s)" % (platform,test['toolset'],run_type),
'track': dart_track[run_type],
'datetimestamp' : timestamp
} )
submission_dom = dart_dom[test['toolset']]
for node in log_dom.documentElement.childNodes:
if node.nodeType == xml.dom.Node.ELEMENT_NODE:
if node.firstChild:
log_data = xml.sax.saxutils.escape(node.firstChild.data)
else:
log_data = ''
test_dom = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?>
<Test>
<Name>.Test.Boost.%(tag)s.%(library)s.%(test-name)s.%(type)s</Name>
<Status>%(result)s</Status>
<Measurement name="Toolset" type="text/string">%(toolset)s</Measurement>
<Measurement name="Timestamp" type="text/string">%(timestamp)s</Measurement>
<Measurement name="Log" type="text/text">%(log)s</Measurement>
</Test>
''' % {
'tag': tag,
'library': test['library'],
'test-name': test['test-name'],
'toolset': test['toolset'],
'type': node.nodeName,
'result': dart_status_from_result[node.getAttribute('result')],
'timestamp': node.getAttribute('timestamp'),
'log': log_data
})
submission_dom.documentElement.appendChild(
test_dom.documentElement.cloneNode(1) )
for input_dir in input_dirs:
utils.log( 'Walking directory "%s" ...' % input_dir )
os.path.walk( input_dir, _publish_test_log_files_, None )
if dart_server:
try:
rpc_transport = None
if http_proxy:
rpc_transport = xmlrpcProxyTransport(http_proxy)
dart_rpc = xmlrpclib.ServerProxy(
'http://%s/%s/Command/' % (dart_server,dart_project[tag]),
rpc_transport )
for dom in dart_dom.values():
#~ utils.log('Dart XML: %s' % dom.toxml('utf-8'))
dart_rpc.Submit.put(xmlrpclib.Binary(dom.toxml('utf-8')))
except Exception, e:
utils.log('Dart server error: %s' % e)
def upload_to_ftp( tag, results_file, ftp_proxy, debug_level, ftp_url ):
if not ftp_url:
ftp_host = 'boost.cowic.de'
ftp_url = ''.join(['ftp','://anonymous','@',ftp_host,'/boost/do-not-publish-this-url/results/'])
utils.log( 'Uploading log archive "%s" to %s' % ( results_file, tag ) )
ftp_parts = urlparse.urlparse(ftp_url)
ftp_netloc = re.split('[@]',ftp_parts[1])
ftp_user = re.split('[:]',ftp_netloc[0])[0]
ftp_password = re.split('[:]',ftp_netloc[0]+':anonymous')[1]
ftp_site = re.split('[:]',ftp_netloc[1])[0]
ftp_path = ftp_parts[2]
if not ftp_proxy:
ftp = ftplib.FTP( ftp_site )
ftp.set_debuglevel( debug_level )
ftp.login( ftp_user, ftp_password )
else:
utils.log( ' Connecting through FTP proxy server "%s"' % ftp_proxy )
ftp = ftplib.FTP( ftp_proxy )
ftp.set_debuglevel( debug_level )
ftp.set_pasv (0) # turn off PASV mode
ftp.login( '%s@%s' % (ftp_user,ftp_site), ftp_password )
ftp.cwd( ftp_path )
try:
ftp.cwd( tag )
except ftplib.error_perm:
for dir in tag.split( '/' ):
ftp.mkd( dir )
ftp.cwd( dir )
f = open( results_file, 'rb' )
ftp.storbinary( 'STOR %s' % os.path.basename( results_file ), f )
ftp.quit()
def copy_comments( results_xml, comment_file ):
results_xml.startElement( 'comment', {} )
if os.path.exists( comment_file ):
utils.log( 'Reading comments file "%s"...' % comment_file )
f = open( comment_file, 'r' )
try:
results_xml.characters( f.read() )
finally:
f.close()
else:
utils.log( 'Warning: comment file "%s" is not found.' % comment_file )
lines = ['']
for arg in sys.argv:
# Make sure that the ftp details are hidden
arg = re.sub( 'ftp://.*$', 'ftp://XXXXX', arg )
# Escape quotes
arg = re.sub( r'(\\|")', r'\\\1', arg )
# Quote arguments if needed
if arg.find( ' ' ) != -1:
arg = '"%s"' % arg
if len( lines[-1] ) + len( arg ) + 2 >= 80:
# align backslashes
lines[-1] += ' ' * ( 79 - len( lines[-1] ) )
# indent lines after the first
lines.append( ' ' )
lines[-1] += ( arg + ' ' )
results_xml.characters( '<hr>' )
results_xml.characters( '<dl>' )
results_xml.characters( '<dt>Command Line</dt>' )
results_xml.characters( '<dd>' )
results_xml.characters( '<pre>' )
results_xml.characters( '\\\n'.join(lines) )
results_xml.characters( '</pre>' )
results_xml.characters( '</dd>' )
results_xml.characters( '</dl>\n' )
results_xml.endElement( 'comment' )
def compress_file( file_path, archive_path ):
utils.log( 'Compressing "%s"...' % file_path )
try:
z = zipfile.ZipFile( archive_path, 'w', zipfile.ZIP_DEFLATED )
z.write( file_path, os.path.basename( file_path ) )
z.close()
utils.log( 'Done writing "%s".'% archive_path )
except Exception, msg:
utils.log( 'Warning: Compressing falied (%s)' % msg )
utils.log( ' Trying to compress using a platform-specific tool...' )
try: import zip_cmd
except ImportError:
script_dir = os.path.dirname( os.path.abspath( sys.argv[0] ) )
utils.log( 'Could not find \'zip_cmd\' module in the script directory (%s).' % script_dir )
raise Exception( 'Compressing failed!' )
else:
if os.path.exists( archive_path ):
os.unlink( archive_path )
utils.log( 'Removing stale "%s".' % archive_path )
zip_cmd.main( file_path, archive_path )
utils.log( 'Done compressing "%s".' % archive_path )
def read_timestamp( file ):
if not os.path.exists( file ):
result = time.gmtime()
utils.log( 'Warning: timestamp file "%s" does not exist'% file )
utils.log( 'Using current UTC time (%s)' % result )
return result
return time.gmtime( os.stat( file ).st_mtime )
def collect_logs(
results_dir
, runner_id
, tag
, platform
, comment_file
, timestamp_file
, user
, source
, run_type
, dart_server = None
, http_proxy = None
, revision = ''
, **unused
):
timestamp = time.strftime( '%Y-%m-%dT%H:%M:%SZ', read_timestamp( timestamp_file ) )
if dart_server:
publish_test_logs( [ results_dir ],
runner_id, tag, platform, comment_file, timestamp, user, source, run_type,
dart_server = dart_server,
http_proxy = http_proxy )
results_file = os.path.join( results_dir, '%s.xml' % runner_id )
results_writer = open( results_file, 'w' )
utils.log( 'Collecting test logs into "%s"...' % results_file )
results_xml = xml.sax.saxutils.XMLGenerator( results_writer )
results_xml.startDocument()
results_xml.startElement(
'test-run'
, {
'tag': tag
, 'platform': platform
, 'runner': runner_id
, 'timestamp': timestamp
, 'source': source
, 'run-type': run_type
, 'revision': revision
}
)
copy_comments( results_xml, comment_file )
collect_test_logs( [ results_dir ], results_writer )
results_xml.endElement( "test-run" )
results_xml.endDocument()
results_writer.close()
utils.log( 'Done writing "%s".' % results_file )
compress_file(
results_file
, os.path.join( results_dir,'%s.zip' % runner_id )
)
def upload_logs(
results_dir
, runner_id
, tag
, user
, ftp_proxy
, debug_level
, send_bjam_log = False
, timestamp_file = None
, dart_server = None
, ftp_url = None
, **unused
):
logs_archive = os.path.join( results_dir, '%s.zip' % runner_id )
upload_to_ftp( tag, logs_archive, ftp_proxy, debug_level, ftp_url )
if send_bjam_log:
bjam_log_path = os.path.join( results_dir, 'bjam.log' )
if not timestamp_file:
timestamp_file = bjam_log_path
timestamp = time.strftime( '%Y-%m-%d-%H-%M-%S', read_timestamp( timestamp_file ) )
logs_archive = os.path.join( results_dir, '%s.%s.log.zip' % ( runner_id, timestamp ) )
compress_file( bjam_log_path, logs_archive )
upload_to_ftp( '%s/logs' % tag, logs_archive, ftp_proxy, debug_level, ftp_url )
def collect_and_upload_logs(
results_dir
, runner_id
, tag
, platform
, comment_file
, timestamp_file
, user
, source
, run_type
, revision = None
, ftp_proxy = None
, debug_level = 0
, send_bjam_log = False
, dart_server = None
, http_proxy = None
, ftp_url = None
, **unused
):
collect_logs(
results_dir
, runner_id
, tag
, platform
, comment_file
, timestamp_file
, user
, source
, run_type
, revision = revision
, dart_server = dart_server
, http_proxy = http_proxy
)
upload_logs(
results_dir
, runner_id
, tag
, user
, ftp_proxy
, debug_level
, send_bjam_log
, timestamp_file
, dart_server = dart_server
, ftp_url = ftp_url
)
def accept_args( args ):
args_spec = [
'locate-root='
, 'runner='
, 'tag='
, 'platform='
, 'comment='
, 'timestamp='
, 'source='
, 'run-type='
, 'user='
, 'ftp-proxy='
, 'proxy='
, 'debug-level='
, 'send-bjam-log'
, 'help'
, 'dart-server='
, 'revision='
, 'ftp='
]
options = {
'--tag' : 'trunk'
, '--platform' : sys.platform
, '--comment' : 'comment.html'
, '--timestamp' : 'timestamp'
, '--user' : None
, '--source' : 'SVN'
, '--run-type' : 'full'
, '--ftp-proxy' : None
, '--proxy' : None
, '--debug-level' : 0
, '--dart-server' : 'beta.boost.org:8081'
, '--revision' : None
, '--ftp' : None
}
utils.accept_args( args_spec, args, options, usage )
return {
'results_dir' : options[ '--locate-root' ]
, 'runner_id' : options[ '--runner' ]
, 'tag' : options[ '--tag' ]
, 'platform' : options[ '--platform']
, 'comment_file' : options[ '--comment' ]
, 'timestamp_file' : options[ '--timestamp' ]
, 'user' : options[ '--user' ]
, 'source' : options[ '--source' ]
, 'run_type' : options[ '--run-type' ]
, 'ftp_proxy' : options[ '--ftp-proxy' ]
, 'http_proxy' : options[ '--proxy' ]
, 'debug_level' : int(options[ '--debug-level' ])
, 'send_bjam_log' : options.has_key( '--send-bjam-log' )
, 'dart_server' : options[ '--dart-server' ]
, 'revision' : options[ '--revision' ]
, 'ftp' : options[ '--ftp' ]
}
commands = {
'collect-and-upload' : collect_and_upload_logs
, 'collect-logs' : collect_logs
, 'upload-logs' : upload_logs
}
def usage():
print 'Usage: %s [command] [options]' % os.path.basename( sys.argv[0] )
print '''
Commands:
\t%s
Options:
\t--locate-root directory to to scan for "test_log.xml" files
\t--runner runner ID (e.g. "Metacomm")
\t--timestamp path to a file which modification time will be used
\t as a timestamp of the run ("timestamp" by default)
\t--comment an HTML comment file to be inserted in the reports
\t ("comment.html" by default)
\t--tag the tag for the results ("trunk" by default)
\t--user SourceForge user name for a shell account (optional)
\t--source where Boost sources came from ("SVN" or "tarball";
\t "SVN" by default)
\t--run-type "incremental" or "full" ("full" by default)
\t--send-bjam-log in addition to regular XML results, send in full bjam
\t log of the regression run
\t--proxy HTTP proxy server address and port (e.g.
\t 'http://www.someproxy.com:3128', optional)
\t--ftp-proxy FTP proxy server (e.g. 'ftpproxy', optional)
\t--debug-level debugging level; controls the amount of debugging
\t output printed; 0 by default (no debug output)
\t--dart-server The dart server to send results to.
\t--ftp The ftp URL to upload results to.
''' % '\n\t'.join( commands.keys() )
def main():
if len(sys.argv) > 1 and sys.argv[1] in commands:
command = sys.argv[1]
args = sys.argv[ 2: ]
else:
command = 'collect-and-upload'
args = sys.argv[ 1: ]
commands[ command ]( **accept_args( args ) )
if __name__ != '__main__': import utils
else:
# in absense of relative import...
xsl_path = os.path.abspath( os.path.dirname( sys.argv[ 0 ] ) )
while os.path.basename( xsl_path ) != 'xsl_reports': xsl_path = os.path.dirname( xsl_path )
sys.path.append( xsl_path )
import utils
main()

View File

@@ -1,41 +0,0 @@
// tools/regression/src/detail/common.hpp --------------------------------------------//
// Copyright Beman Dawes 2002.
// Distributed under the Boost Software License, Version 1.0.
// See http://www.boost.org/LICENSE_1_0.txt
#ifndef BOOST_REGRESSION_TOOLS_COMMON_HPP
#define BOOST_REGRESSION_TOOLS_COMMON_HPP
#include <boost/filesystem/operations.hpp>
#include <boost/assert.hpp>
#include <string>
namespace boost
{
namespace regression_tools
{
// boost_root_path -----------------------------------------------------------------//
inline boost::filesystem::path boost_root_path(const boost::filesystem::path& start =
boost::filesystem::initial_path())
// Requires: start is absolute
{
BOOST_ASSERT(start.is_absolute());
for(boost::filesystem::path root = start;
!root.empty();
root = root.parent_path())
{
if ( boost::filesystem::exists(root / "libs"))
return root;
}
return boost::filesystem::path();
}
} // namespace regression_tools
} // namespace boost
#endif // include guard

View File

@@ -1,167 +0,0 @@
// tiny XML sub-set tools implementation -----------------------------------//
// (C) Copyright Beman Dawes 2002. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include "tiny_xml.hpp"
#include <cassert>
#include <cstring>
namespace
{
void eat_whitespace( char & c, std::istream & in )
{
while ( c == ' ' || c == '\r' || c == '\n' || c == '\t' )
in.get( c );
}
std::string get_name( char & c, std::istream & in )
{
std::string result;
eat_whitespace( c, in );
while ( std::strchr(
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_-.", c )
!= 0 )
{
result += c;
if(!in.get( c ))
throw std::string("xml: unexpected eof");
}
return result;
}
void eat_delim( char & c, std::istream & in,
char delim, const std::string & msg )
{
eat_whitespace( c, in );
if ( c != delim )
throw std::string("xml syntax error, expected ") + delim
+ " (" + msg + ")";
in.get( c );
}
std::string get_value( char & c, std::istream & in )
{
std::string result;
while ( c != '\"' )
{
result += c;
in.get( c );
}
in.get( c );
return result;
}
}
namespace boost
{
namespace tiny_xml
{
// parse -----------------------------------------------------------------//
element_ptr parse( std::istream & in, const std::string & msg )
{
char c = 0; // current character
element_ptr e( new element );
if(!in.get( c ))
throw std::string("xml: unexpected eof");
if ( c == '<' )
if(!in.get( c ))
throw std::string("xml: unexpected eof");
e->name = get_name( c, in );
eat_whitespace( c, in );
// attributes
while ( c != '>' )
{
attribute a;
a.name = get_name( c, in );
eat_delim( c, in, '=', msg );
eat_delim( c, in, '\"', msg );
a.value = get_value( c, in );
e->attributes.push_back( a );
eat_whitespace( c, in );
}
if(!in.get( c )) // next after '>'
throw std::string("xml: unexpected eof");
eat_whitespace( c, in );
// sub-elements
while ( c == '<' )
{
if ( in.peek() == '/' ) break;
e->elements.push_back( parse( in, msg ) );
in.get( c ); // next after '>'
eat_whitespace( c, in );
}
// content
if ( c != '<' )
{
e->content += '\n';
while ( c != '<' )
{
e->content += c;
if(!in.get( c ))
throw std::string("xml: unexpected eof");
}
}
assert( c == '<' );
if(!in.get( c )) // next after '<'
throw std::string("xml: unexpected eof");
eat_delim( c, in, '/', msg );
std::string end_name( get_name( c, in ) );
if ( e->name != end_name )
throw std::string("xml syntax error: beginning name ")
+ e->name + " did not match end name " + end_name
+ " (" + msg + ")";
eat_delim( c, in, '>', msg );
return e;
}
// write ---------------------------------------------------------------//
void write( const element & e, std::ostream & out )
{
out << "<" << e.name;
if ( !e.attributes.empty() )
{
for( attribute_list::const_iterator itr = e.attributes.begin();
itr != e.attributes.end(); ++itr )
{
out << " " << itr->name << "=\"" << itr->value << "\"";
}
}
out << ">";
if ( !e.elements.empty() )
{
out << "\n";
for( element_list::const_iterator itr = e.elements.begin();
itr != e.elements.end(); ++itr )
{
write( **itr, out );
}
}
if ( !e.content.empty() )
{
out << e.content;
}
out << "</" << e.name << ">\n";
}
} // namespace tiny_xml
} // namespace boost

View File

@@ -1,70 +0,0 @@
// tiny XML sub-set tools --------------------------------------------------//
// (C) Copyright Beman Dawes 2002. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Provides self-contained tools for this XML sub-set:
//
// element ::= { "<" name { name "=" "\"" value "\"" } ">"
// {element} [contents] "</" name ">" }
//
// The point of "self-contained" is to minimize tool-chain dependencies.
#ifndef BOOST_TINY_XML_H
#define BOOST_TINY_XML_H
#include "boost/smart_ptr.hpp" // for shared_ptr
#include "boost/utility.hpp" // for noncopyable
#include <list>
#include <iostream>
#include <string>
namespace boost
{
namespace tiny_xml
{
class element;
struct attribute
{
std::string name;
std::string value;
attribute(){}
attribute( const std::string & name, const std::string & value )
: name(name), value(value) {}
};
typedef boost::shared_ptr< element > element_ptr;
typedef std::list< element_ptr > element_list;
typedef std::list< attribute > attribute_list;
class element
: private boost::noncopyable // because deep copy sematics would be required
{
public:
std::string name;
attribute_list attributes;
element_list elements;
std::string content;
element() {}
explicit element( const std::string & name ) : name(name) {}
};
element_ptr parse( std::istream & in, const std::string & msg );
// Precondition: stream positioned at either the initial "<"
// or the first character after the initial "<".
// Postcondition: stream positioned at the first character after final
// ">" (or eof).
// Returns: an element_ptr to an element representing the parsed stream.
// Throws: std::string on syntax error. msg appended to what() string.
void write( const element & e, std::ostream & out );
}
}
#endif // BOOST_TINY_XML_H

View File

@@ -1,17 +0,0 @@
// tiny XML test program ---------------------------------------------------//
// Copyright Beman Dawes 2002. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include "tiny_xml.hpp"
#include <iostream>
int main()
{
boost::tiny_xml::element_ptr tree( boost::tiny_xml::parse( std::cin ) );
boost::tiny_xml::write( *tree, std::cout );
return 0;
}

View File

@@ -1,17 +0,0 @@
<root>
<frontmatter>
// (C) Copyright Beman Dawes 2002. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
</frontmatter>
<element-1 at-1="abcd" at-2 = "defg" >
<element-1a>
It's Howdy Doody time!
</element-1a>
<element-1b>It's not Howdy Doody time!</element-1b>
</element-1>
<element-2>
It's
Eastern Standard time!
</element-2>
</root>

View File

@@ -1,206 +0,0 @@
#!/usr/bin/python
# Copyright Rene Rivera 2012-2013
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import os
import sys
root = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0,os.path.join(root,'dulwich'))
import shutil
import stat
import dulwich
import dulwich.index
from dulwich.client import get_transport_and_path
from dulwich.repo import Repo
from dulwich.objects import ZERO_SHA, Tree
from dulwich.file import ensure_dir_exists
from dulwich.index import changes_from_tree, Index, index_entry_from_stat
if __name__ == '__main__':
print "# Using dulwich version %s.%s.%s"%(dulwich.__version__)
class RemoteRepo(Repo):
def __init__(self, root, remote_uri, branch='develop', reset=False):
if reset:
if os.path.exists(root):
shutil.rmtree(root)
if not os.path.isdir(root):
dulwich.file.ensure_dir_exists(root)
controldir = os.path.join(root, ".git")
os.mkdir(controldir)
Repo._init_maybe_bare(controldir, False)
Repo.__init__(self,root)
self.remote_uri = remote_uri
self.client, self.host_path = get_transport_and_path(self.remote_uri)
self.want_branch = branch
self.want_reset = reset
print "# Remote repo %s @ %s => %s"%(self.remote_uri,self._branch_ref(),self.path)
def _branch_ref(self):
return "refs/heads/"+self.want_branch
def _head_ref(self):
return 'HEAD'
def _determine_wants(self,refs):
result = []
for (ref,sha) in refs.iteritems():
if sha == ZERO_SHA:
continue
if sha in self.object_store:
continue
if ref == self._branch_ref():
result.append(sha)
return result
def _fetch(self):
print "# Fetching repo %s => %s.."%(self.remote_uri,self.path)
remote_refs = self.client.fetch(
self.host_path,
self,
determine_wants=self._determine_wants,
progress=sys.stdout.write)
self[self._branch_ref()] = remote_refs[self._branch_ref()]
def _get_tree(self, sha=None):
if not sha:
return self[self._branch_ref()].tree
else:
return self[sha].tree
def _get_entry_at_path(self, path, sha=None):
for entry in self.object_store.iter_tree_contents(self._get_tree(sha)):
if entry.path == path:
return entry
return None
def _init_subrepos(self, sha=None):
self.subrepo = {}
gitmodules_entry = self._get_entry_at_path(".gitmodules",sha)
if gitmodules_entry:
modules = self.object_store.get_raw(gitmodules_entry.sha)[1].splitlines()
modules.append(None)
submodule = None
submodule_path = None
submodule_url = None
for l in modules:
if None == l or l.startswith("[submodule "):
if None != submodule and None != submodule_path and \
None != submodule_url:
submodule_entry = self._get_entry_at_path(submodule_path)
self.subrepo[submodule_path] \
= (submodule_entry.sha,
RemoteRepo(
os.path.join(self.path,submodule_path),
submodule_url,
branch = self.want_branch,
reset = self.want_reset))
if None != l:
submodule = l.split("\"")[1]
submodule_path = None
submodule_url = None
elif l.strip().startswith("path = "):
submodule_path = l.strip().rsplit(" ",1)[-1]
elif l.strip().startswith("url = "):
submodule_url = l.strip().rsplit(" ",1)[-1]
if submodule_url.startswith('git:'):
submodule_url = submodule_url.replace("git:","https:")
if submodule_url.startswith('.'):
submodule_url = self.remote_uri+"/"+submodule_url
def _checkout_entry(self,index,path,mode,sha):
full_path = os.path.join(self.path, path)
checked_out_entry = False
if stat.S_ISREG(mode):
if not os.path.exists(os.path.dirname(full_path)):
os.makedirs(os.path.dirname(full_path))
f = open(full_path, 'wb')
try:
f.write(self.object_store[sha].as_raw_string())
finally:
f.close()
os.chmod(full_path, mode)
checked_out_entry = True
elif dulwich.objects.S_ISGITLINK(mode) and path in self.subrepo:
self.subrepo[path][1].checkout(self.subrepo[path][0])
checked_out_entry = True
if checked_out_entry:
st = os.lstat(full_path)
index[path] = index_entry_from_stat(st, sha, 0, mode=mode)
index.write()
def checkout(self, sha=None):
self._fetch()
print "# Checkout repo %s @ %s => %s.."%(self.remote_uri,sha,self.path)
self._init_subrepos(sha)
target = self['HEAD']
if sha:
target = self[sha]
target_tree = target.tree
index = Index(self.index_path())
for e_path,e_mode,e_sha in index.changes_from_tree(self.object_store, target_tree, want_unchanged=True):
e_source = (e_path[1],e_mode[1],e_sha[1])
e_target = (e_path[0],e_mode[0],e_sha[0])
e_to_checkout = None
if e_source[0] and e_target[0]:
if not os.path.exists(os.path.join(self.path, e_target[0])):
e_to_checkout = ("A",e_target)
elif e_source[2] != e_target[2]:
e_to_checkout = ("U",e_target)
elif not e_target[0]:
print "D %s"%(os.path.join(self.path, e_source[0]))
if stat.S_ISREG(e_source[1]):
os.unlink(os.path.join(self.path, e_source[0]))
del index[e_source[0]]
else:
e_to_checkout = ("A",e_target)
if e_to_checkout:
print "%s %s"%(e_to_checkout[0],os.path.join(self.path,e_to_checkout[1][0]))
self._checkout_entry(index, *e_to_checkout[1])
self._init_subrepos(sha)
for subrepo in self.subrepo.values():
subrepo[1].checkout(subrepo[0])
def status(self, sha=None):
print "# Status.."
target = self['HEAD']
if sha:
target = self[sha]
target_tree = target.tree
index = Index(self.index_path())
for e_path,e_mode,e_sha in index.changes_from_tree(self.object_store, target_tree, want_unchanged=True):
e_source = (e_path[0],e_mode[0],e_sha[0])
e_target = (e_path[1],e_mode[1],e_sha[1])
if e_source[0] and e_target[0]:
if not os.path.exists(os.path.join(self.path, e_source[0])):
print "D %s"%(os.path.join(self.path, e_source[0]))
elif e_source[2] != e_target[2]:
print "M %s"%(os.path.join(self.path, e_source[0]))
elif not e_target[0]:
print "D %s"%(os.path.join(self.path, e_source[0]))
else:
print "A %s"%(os.path.join(self.path, e_target[0]))
if __name__ == '__main__':
if True:
g = RemoteRepo(
'git_test_boost',
'https://github.com/boostorg/boost.git',
branch='master',
reset=False)
g.checkout()
g.status()
if False:
g = RemoteRepo(
'git_test_predef',
'https://github.com/grafikrobot/boost-predef.git',
branch='master',
reset=True)
g.checkout()
g.status()

View File

@@ -1,468 +0,0 @@
#!/usr/bin/python
# Copyright 2008 Rene Rivera
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import re
import optparse
import time
import xml.dom.minidom
import xml.dom.pulldom
from xml.sax.saxutils import unescape, escape
import os.path
import sys
#~ Process a bjam XML log into the XML log format for Boost result processing.
class BJamLog2Results:
def __init__(self,args=None):
opt = optparse.OptionParser(
usage="%prog [options] input")
opt.add_option( '--output',
help="output file" )
opt.add_option( '--runner',
help="runner ID (e.g. 'Metacomm')" )
opt.add_option( '--comment',
help="an HTML comment file to be inserted in the reports" )
opt.add_option( '--tag',
help="the tag for the results" )
opt.add_option( '--incremental',
help="do incremental run (do not remove previous binaries)",
action='store_true' )
opt.add_option( '--platform' )
opt.add_option( '--source' )
opt.add_option( '--revision' )
self.output = None
self.runner = None
self.comment='comment.html'
self.tag='trunk'
self.incremental=False
self.platform=''
self.source='SVN'
self.revision=None
self.input = []
( _opt_, self.input ) = opt.parse_args(args,self)
if self.incremental:
run_type = 'incremental'
else:
run_type = 'full'
self.results = xml.dom.minidom.parseString('''<?xml version="1.0" encoding="UTF-8"?>
<test-run
source="%(source)s"
runner="%(runner)s"
timestamp=""
platform="%(platform)s"
tag="%(tag)s"
run-type="%(run-type)s"
revision="%(revision)s">
</test-run>
''' % {
'source' : self.source,
'runner' : self.runner,
'platform' : self.platform,
'tag' : self.tag,
'run-type' : run_type,
'revision' : self.revision,
} )
self.test = {}
self.target_to_test = {}
self.target = {}
self.parent = {}
self.log = {}
self.add_log()
self.gen_output()
#~ print self.test
#~ print self.target
def add_log(self):
if self.input[0]:
bjam_xml = self.input[0]
else:
bjam_xml = self.input[1]
events = xml.dom.pulldom.parse(bjam_xml)
context = []
test_run = self.results.documentElement
for (event,node) in events:
if event == xml.dom.pulldom.START_ELEMENT:
context.append(node)
if node.nodeType == xml.dom.Node.ELEMENT_NODE:
x_f = self.x_name_(*context)
if x_f:
events.expandNode(node)
# expanding eats the end element, hence walking us out one level
context.pop()
# call the translator, and add returned items to the result
items = (x_f[1])(node)
if items:
for item in items:
if item:
test_run.appendChild(self.results.createTextNode("\n"))
test_run.appendChild(item)
elif event == xml.dom.pulldom.END_ELEMENT:
context.pop()
#~ Add the log items now that we've collected all of them.
items = self.log.values()
if items:
for item in items:
if item:
test_run.appendChild(self.results.createTextNode("\n"))
test_run.appendChild(item)
def gen_output(self):
if self.output:
out = open(self.output,'w')
else:
out = sys.stdout
if out:
self.results.writexml(out,encoding='utf-8')
def tostring(self):
return self.results.toxml('utf-8')
def x_name_(self, *context, **kwargs):
node = None
names = [ ]
for c in context:
if c:
if not isinstance(c,xml.dom.Node):
suffix = '_'+c.replace('-','_').replace('#','_')
else:
suffix = '_'+c.nodeName.replace('-','_').replace('#','_')
node = c
names.append('x')
names = map(lambda x: x+suffix,names)
if node:
for name in names:
if hasattr(self,name):
return (name,getattr(self,name))
return None
def x(self, *context, **kwargs):
node = None
names = [ ]
for c in context:
if c:
if not isinstance(c,xml.dom.Node):
suffix = '_'+c.replace('-','_').replace('#','_')
else:
suffix = '_'+c.nodeName.replace('-','_').replace('#','_')
node = c
names.append('x')
names = map(lambda x: x+suffix,names)
if node:
for name in names:
if hasattr(self,name):
return getattr(self,name)(node,**kwargs)
else:
assert False, 'Unknown node type %s'%(name)
return None
#~ The timestamp goes to the corresponding attribute in the result.
def x_build_timestamp( self, node ):
test_run = self.results.documentElement
test_run.setAttribute('timestamp',self.get_data(node).strip())
return None
#~ Comment file becomes a comment node.
def x_build_comment( self, node ):
comment = None
if self.comment:
comment_f = open(self.comment)
if comment_f:
comment = comment_f.read()
comment_f.close()
if not comment:
comment = ''
return [self.new_text('comment',comment)]
#~ Tests are remembered for future reference.
def x_build_test( self, node ):
test_run = self.results.documentElement
test_node = node
test_name = test_node.getAttribute('name')
self.test[test_name] = {
'library' : '/'.join(test_name.split('/')[0:-1]),
'test-name' : test_name.split('/')[-1],
'test-type' : test_node.getAttribute('type').lower(),
'test-program' : self.get_child_data(test_node,tag='source',strip=True),
'target' : self.get_child_data(test_node,tag='target',strip=True),
'info' : self.get_child_data(test_node,tag='info',strip=True)
}
#~ Add a lookup for the test given the test target.
self.target_to_test[self.test[test_name]['target']] = test_name
#~ print "--- %s\n => %s" %(self.test[test_name]['target'],test_name)
return None
#~ Process the target dependency DAG into an ancestry tree so we can look up
#~ which top-level library and test targets specific build actions correspond to.
def x_build_targets_target( self, node ):
test_run = self.results.documentElement
target_node = node
name = self.get_child_data(target_node,tag='name',strip=True)
path = self.get_child_data(target_node,tag='path',strip=True)
jam_target = self.get_child_data(target_node,tag='jam-target',strip=True)
#~ print "--- target :: %s" %(name)
#~ Map for jam targets to virtual targets.
self.target[jam_target] = {
'name' : name,
'path' : path
}
#~ Create the ancestry.
dep_node = self.get_child(self.get_child(target_node,tag='dependencies'),tag='dependency')
while dep_node:
child = self.get_data(dep_node,strip=True)
child_jam_target = '<p%s>%s' % (path,child.split('//',1)[1])
self.parent[child_jam_target] = jam_target
#~ print "--- %s\n ^ %s" %(jam_target,child_jam_target)
dep_node = self.get_sibling(dep_node.nextSibling,tag='dependency')
return None
#~ Given a build action log, process into the corresponding test log and
#~ specific test log sub-part.
def x_build_action( self, node ):
test_run = self.results.documentElement
action_node = node
name = self.get_child(action_node,tag='name')
if name:
name = self.get_data(name)
#~ Based on the action, we decide what sub-section the log
#~ should go into.
action_type = None
if re.match('[^%]+%[^.]+[.](compile)',name):
action_type = 'compile'
elif re.match('[^%]+%[^.]+[.](link|archive)',name):
action_type = 'link'
elif re.match('[^%]+%testing[.](capture-output)',name):
action_type = 'run'
elif re.match('[^%]+%testing[.](expect-failure|expect-success)',name):
action_type = 'result'
#~ print "+ [%s] %s %s :: %s" %(action_type,name,'','')
if action_type:
#~ Get the corresponding test.
(target,test) = self.get_test(action_node,type=action_type)
#~ Skip actions that have no corresponding test, as they are
#~ regular build actions and don't need to show up in the
#~ regression results.
if not test:
return None
#~ And the log node, which we will add the results to.
log = self.get_log(action_node,test)
#~ print "--- [%s] %s %s :: %s" %(action_type,name,target,test)
#~ Collect some basic info about the action.
result_data = "%(info)s\n\n%(command)s\n%(output)s\n" % {
'command' : self.get_action_command(action_node,action_type),
'output' : self.get_action_output(action_node,action_type),
'info' : self.get_action_info(action_node,action_type)
}
#~ For the test result status we find the appropriate node
#~ based on the type of test, then adjust the result status
#~ accordingly. This makes the result status reflect the
#~ expectation, as the result pages' post-processing does not
#~ account for this inversion.
action_tag = action_type
if action_type == 'result':
if re.match(r'^compile',test['test-type']):
action_tag = 'compile'
elif re.match(r'^link',test['test-type']):
action_tag = 'link'
elif re.match(r'^run',test['test-type']):
action_tag = 'run'
#~ The result sub-part we will add this result to.
result_node = self.get_child(log,tag=action_tag)
if action_node.getAttribute('status') == '0':
action_result = 'succeed'
else:
action_result = 'fail'
if not result_node:
#~ If we don't have one already, create it and add the result.
result_node = self.new_text(action_tag,result_data,
result = action_result,
timestamp = action_node.getAttribute('start'))
log.appendChild(self.results.createTextNode("\n"))
log.appendChild(result_node)
else:
#~ For an existing result node we set the status to fail
#~ when any of the individual actions fail, except for result
#~ status.
if action_type != 'result':
result = result_node.getAttribute('result')
if action_node.getAttribute('status') != '0':
result = 'fail'
else:
result = action_result
result_node.setAttribute('result',result)
result_node.appendChild(self.results.createTextNode("\n"))
result_node.appendChild(self.results.createTextNode(result_data))
return None
#~ The command executed for the action. For run actions we omit the command
#~ as it's just noise.
def get_action_command( self, action_node, action_type ):
if action_type != 'run':
return self.get_child_data(action_node,tag='command')
else:
return ''
#~ The command output.
def get_action_output( self, action_node, action_type ):
return self.get_child_data(action_node,tag='output',default='')
#~ Some basic info about the action.
def get_action_info( self, action_node, action_type ):
info = ""
#~ The jam action and target.
info += "%s %s\n" %(self.get_child_data(action_node,tag='name'),
self.get_child_data(action_node,tag='path'))
#~ The timing of the action.
info += "Time: (start) %s -- (end) %s -- (user) %s -- (system) %s\n" %(
action_node.getAttribute('start'), action_node.getAttribute('end'),
action_node.getAttribute('user'), action_node.getAttribute('system'))
#~ And for compiles some context that may be hidden if using response files.
if action_type == 'compile':
define = self.get_child(self.get_child(action_node,tag='properties'),name='define')
while define:
info += "Define: %s\n" %(self.get_data(define,strip=True))
define = self.get_sibling(define.nextSibling,name='define')
return info
#~ Find the test corresponding to an action. For testing targets these
#~ are the ones pre-declared in the --dump-test option. For libraries
#~ we create a dummy test as needed.
def get_test( self, node, type = None ):
jam_target = self.get_child_data(node,tag='jam-target')
base = self.target[jam_target]['name']
target = jam_target
while target in self.parent:
target = self.parent[target]
#~ print "--- TEST: %s ==> %s" %(jam_target,target)
#~ main-target-type is a precise indicator of what the build target is
#~ originally meant to be.
main_type = self.get_child_data(self.get_child(node,tag='properties'),
name='main-target-type',strip=True)
if main_type == 'LIB' and type:
lib = self.target[target]['name']
if not lib in self.test:
self.test[lib] = {
'library' : re.search(r'libs/([^/]+)',lib).group(1),
'test-name' : os.path.basename(lib),
'test-type' : 'lib',
'test-program' : os.path.basename(lib),
'target' : lib
}
test = self.test[lib]
else:
target_name_ = self.target[target]['name']
if self.target_to_test.has_key(target_name_):
test = self.test[self.target_to_test[target_name_]]
else:
test = None
return (base,test)
#~ Find, or create, the test-log node to add results to.
def get_log( self, node, test ):
target_directory = os.path.dirname(self.get_child_data(
node,tag='path',strip=True))
target_directory = re.sub(r'.*[/\\]bin[.]v2[/\\]','',target_directory)
target_directory = re.sub(r'[\\]','/',target_directory)
if not target_directory in self.log:
if 'info' in test and test['info'] == 'always_show_run_output':
show_run_output = 'true'
else:
show_run_output = 'false'
self.log[target_directory] = self.new_node('test-log',
library=test['library'],
test_name=test['test-name'],
test_type=test['test-type'],
test_program=test['test-program'],
toolset=self.get_toolset(node),
target_directory=target_directory,
show_run_output=show_run_output)
return self.log[target_directory]
#~ The precise toolset from the build properties.
def get_toolset( self, node ):
toolset = self.get_child_data(self.get_child(node,tag='properties'),
name='toolset',strip=True)
toolset_version = self.get_child_data(self.get_child(node,tag='properties'),
name='toolset-%s:version'%toolset,strip=True)
return '%s-%s' %(toolset,toolset_version)
#~ XML utilities...
def get_sibling( self, sibling, tag = None, id = None, name = None, type = None ):
n = sibling
while n:
found = True
if type and found:
found = found and type == n.nodeType
if tag and found:
found = found and tag == n.nodeName
if (id or name) and found:
found = found and n.nodeType == xml.dom.Node.ELEMENT_NODE
if id and found:
if n.hasAttribute('id'):
found = found and n.getAttribute('id') == id
else:
found = found and n.hasAttribute('id') and n.getAttribute('id') == id
if name and found:
found = found and n.hasAttribute('name') and n.getAttribute('name') == name
if found:
return n
n = n.nextSibling
return None
def get_child( self, root, tag = None, id = None, name = None, type = None ):
return self.get_sibling(root.firstChild,tag=tag,id=id,name=name,type=type)
def get_data( self, node, strip = False, default = None ):
data = None
if node:
data_node = None
if not data_node:
data_node = self.get_child(node,tag='#text')
if not data_node:
data_node = self.get_child(node,tag='#cdata-section')
data = ""
while data_node:
data += data_node.data
data_node = data_node.nextSibling
if data_node:
if data_node.nodeName != '#text' \
and data_node.nodeName != '#cdata-section':
data_node = None
if not data:
data = default
else:
if strip:
data = data.strip()
return data
def get_child_data( self, root, tag = None, id = None, name = None, strip = False, default = None ):
return self.get_data(self.get_child(root,tag=tag,id=id,name=name),strip=strip,default=default)
def new_node( self, tag, *child, **kwargs ):
result = self.results.createElement(tag)
for k in kwargs.keys():
if kwargs[k] != '':
if k == 'id':
result.setAttribute('id',kwargs[k])
elif k == 'klass':
result.setAttribute('class',kwargs[k])
else:
result.setAttribute(k.replace('_','-'),kwargs[k])
for c in child:
if c:
result.appendChild(c)
return result
def new_text( self, tag, data, **kwargs ):
result = self.new_node(tag,**kwargs)
data = data.strip()
if len(data) > 0:
result.appendChild(self.results.createTextNode(data))
return result
if __name__ == '__main__': BJamLog2Results()
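#~ Rough usage sketch (the script and file names are hypothetical; the options
#~ are the ones declared in BJamLog2Results.__init__ above):
#~
#~   python bjamlog2results.py --output=results.xml --runner=MyRunner \
#~       --tag=develop --comment=comment.html --platform=Linux bjam-log.xml
#~
#~ The positional argument is the bjam XML build log to convert; the output is
#~ a <test-run> document in the format expected by the Boost report tools.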

View File

@@ -1,197 +0,0 @@
#!/usr/bin/perl
#~ Copyright 2003, Rene Rivera.
#~ Use, modification and distribution are subject to the Boost Software
#~ License Version 1.0. (See accompanying file LICENSE_1_0.txt or
#~ http://www.boost.org/LICENSE_1_0.txt)
use FileHandle;
use Time::Local;
# Get the whole percent value
#
sub percent_value
{
my ($count,$total) = @_;
my $percent = int (($count/$total)*100+0.5);
if ($count > 0 && $percent == 0) { $percent = 1; }
if ($count < $total && $percent == 100) { $percent = 99; }
return $percent;
}
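# Worked examples of the clamping above (explanatory comment only):
# percent_value(1, 1000) yields 1 rather than 0, and percent_value(999, 1000)
# yields 99 rather than 100, so the value only reads 0 or 100 when the count
# really is none or all of the total.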
# Generate item html for the pass column.
#
sub result_info_pass
{
my ($color,$pass,$warn,$fail,$missing) = @_;
my $percent = 100-percent_value($fail+$missing,$pass+$warn+$fail+$missing);
return "<font color=\"$color\"><font size=\"+1\">$percent%</font><br>($warn&nbsp;warnings)</font>";
}
# Generate item html for the fail column.
#
sub result_info_fail
{
my ($color,$pass,$warn,$fail,$missing) = @_;
my $percent = percent_value($fail+$missing,$pass+$warn+$fail+$missing);
return "<font color=\"$color\"><font size=\"+1\">$percent%</font><br>($fail)</font>";
}
# Generate an age highlighted run date string.
# Use as: data_info(run-date-html)
#
sub date_info
{
my %m = ('January',0,'February',1,'March',2,'April',3,'May',4,'June',5,
'July',6,'August',7,'September',8,'October',9,'November',10,'December',11);
my @d = split(/ |:/,$_[0]);
my ($hour,$min,$sec,$day,$month,$year) = ($d[0],$d[1],$d[2],$d[4],$m{$d[5]},$d[6]);
#print "<!-- $hour.$min.$sec.$day.$month.$year -->\n";
my $test_t = timegm($sec,$min,$hour,$day,$month,$year);
my $age = time-$test_t;
my $age_days = $age/(60*60*24);
#print "<!-- $age_days days old -->\n";
my $age_html = "<font>";
if ($age_days <= 2) { }
elsif ($age_days <= 14) { $age_html = "<font color=\"#FF9900\">"; }
else { $age_html = "<font color=\"#FF0000\">"; }
return $age_html.$_[0]."</font>";
}
# Generate an age string based on the run date.
# Use as: age_info(run-date-html)
#
sub age_info
{
my %m = ('January',0,'February',1,'March',2,'April',3,'May',4,'June',5,
'July',6,'August',7,'September',8,'October',9,'November',10,'December',11);
my @d = split(/ |:/,$_[0]);
my ($hour,$min,$sec,$day,$month,$year) = ($d[0],$d[1],$d[2],$d[4],$m{$d[5]},$d[6]);
#print "<!-- $hour.$min.$sec.$day.$month.$year -->\n";
my $test_t = timegm($sec,$min,$hour,$day,$month,$year);
my $age = time-$test_t;
my $age_days = $age/(60*60*24);
#print "<!-- $age_days days old -->\n";
my $age_html = "<font>";
if ($age_days <= 2) { }
elsif ($age_days <= 14) { $age_html = "<font color=\"#FF9900\">"; }
else { $age_html = "<font color=\"#FF0000\">"; }
if ($age_days <= 1) { $age_html = $age_html."today"; }
elsif ($age_days <= 2) { $age_html = $age_html."yesterday"; }
elsif ($age_days < 14) { my $days = int $age_days; $age_html = $age_html.$days." days"; }
elsif ($age_days < 7*8) { my $weeks = int $age_days/7; $age_html = $age_html.$weeks." weeks"; }
else { my $months = int $age_days/28; $age_html = $age_html.$months." months"; }
return $age_html."</font>";
}
#~ foreach my $k (sort keys %ENV)
#~ {
#~ print "<!-- $k = $ENV{$k} -->\n";
#~ }
my $logdir = "$ENV{PWD}";
#~ my $logdir = "C:\\CVSROOTs\\Boost\\boost\\status";
opendir LOGS, "$logdir";
my @logs = grep /.*links[^.]*\.html$/, readdir LOGS;
closedir LOGS;
my @bgcolor = ( "bgcolor=\"#EEEEFF\"", "" );
my $row = 0;
print "<table>\n";
print "<tr>\n",
"<th align=\"left\" bgcolor=\"#DDDDDD\">Platform</th>\n",
"<th align=\"left\" bgcolor=\"#DDDDDD\">Run Date</th>\n",
"<th align=\"left\" bgcolor=\"#DDDDDD\">Age</th>\n",
"<th align=\"left\" bgcolor=\"#DDDDDD\">Compilers</th>\n",
"<th align=\"left\" bgcolor=\"#DDDDDD\">Pass</th>\n",
"<th align=\"left\" bgcolor=\"#DDDDDD\">Fail</th>\n",
"</tr>\n";
foreach $l (sort { lc($a) cmp lc($b) } @logs)
{
my $log = $l;
$log =~ s/-links//s;
my ($spec) = ($log =~ /cs-([^\.]+)/);
my $fh = new FileHandle;
if ($fh->open("<$logdir/$log"))
{
my $content = join('',$fh->getlines());
$fh->close;
my ($status) = ($content =~ /(<h1>Compiler(.(?!<\/td>))+.)/si);
my ($platform) = ($status =~ /Status: ([^<]+)/si);
my ($run_date) = ($status =~ /Date:<\/b> ([^<]+)/si);
$run_date =~ s/, /<br>/g;
my ($compilers) = ($content =~ /Test Type<\/a><\/t[dh]>((.(?!<\/tr>))+.)/si);
if ($compilers eq "") { next; }
$compilers =~ s/-<br>//g;
$compilers =~ s/<\/td>//g;
my @compiler = ($compilers =~ /<td>(.*)$/gim);
my $count = @compiler;
my @results = ($content =~ /(>Pass<|>Warn<|>Fail<|>Missing<)/gi);
my $test_count = (scalar @results)/$count;
my @pass = map { 0 } (1..$count);
my @warn = map { 0 } (1..$count);
my @fail = map { 0 } (1..$count);
my @missing = map { 0 } (1..$count);
my @total = map { 0 } (1..$count);
#~ print "<!-- ",
#~ "pass = ",join(',',@pass)," ",
#~ "warn = ",join(',',@warn)," ",
#~ "fail = ",join(',',@fail)," ",
#~ "missing = ",join(',',@missing)," ",
#~ "total = ",join(',',@total)," ",
#~ " -->\n";
for my $t (1..$test_count)
{
my $r0 = (($t-1)*$count);
my $r1 = (($t-1)*$count+$count-1);
my @r = @results[(($t-1)*$count)..(($t-1)*$count+$count-1)];
#~ print "<!-- ",
#~ "result = ",join(',',@r)," ",
#~ "range = ",$r0,"..",$r1," (",(scalar @results),")",
#~ " -->\n";
for my $c (1..$count)
{
if ($r[$c-1] =~ /Pass/i) { ++$pass[$c-1]; }
elsif ($r[$c-1] =~ /Warn/i) { ++$warn[$c-1]; }
elsif ($r[$c-1] =~ /Fail/i) { ++$fail[$c-1]; }
elsif ($r[$c-1] =~ /Missing/i) { ++$missing[$c-1]; }
++$total[$c-1];
}
}
#~ print "<!-- ",
#~ "pass = ",join(',',@pass)," ",
#~ "warn = ",join(',',@warn)," ",
#~ "fail = ",join(',',@fail)," ",
#~ "missing = ",join(',',@missing)," ",
#~ "total = ",join(',',@total)," ",
#~ " -->\n";
for my $comp (1..(scalar @compiler))
{
my @lines = split(/<br>/,$compiler[$comp-1]);
if (@lines > 2) { $compiler[$comp-1] = join(' ',@lines[0..(scalar @lines)-2])."<br>".$lines[(scalar @lines)-1]; }
}
print
"<tr>\n",
"<td rowspan=\"$count\" valign=\"top\"><font size=\"+1\">$platform</font><br>(<a href=\"./$log\">$spec</a>)</td>\n",
"<td rowspan=\"$count\" valign=\"top\">",$run_date,"</td>\n",
"<td rowspan=\"$count\" valign=\"top\">",age_info($run_date),"</td>\n",
"<td valign=\"top\" ",$bgcolor[$row],">",$compiler[0],"</td>\n",
"<td valign=\"top\" ",$bgcolor[$row],">",result_info_pass("#000000",$pass[0],$warn[0],$fail[0],$missing[0]),"</td>\n",
"<td valign=\"top\" ",$bgcolor[$row],">",result_info_fail("#FF0000",$pass[0],$warn[0],$fail[0],$missing[0]),"</td>\n",
"</tr>\n";
$row = ($row+1)%2;
foreach my $c (1..($count-1))
{
print
"<tr>\n",
"<td valign=\"top\" ",$bgcolor[$row],">",$compiler[$c],"</td>\n",
"<td valign=\"top\" ",$bgcolor[$row],">",result_info_pass("#000000",$pass[$c],$warn[$c],$fail[$c],$missing[$c]),"</td>\n",
"<td valign=\"top\" ",$bgcolor[$row],">",result_info_fail("#FF0000",$pass[$c],$warn[$c],$fail[$c],$missing[$c]),"</td>\n",
"</tr>\n";
$row = ($row+1)%2;
}
print
"<tr>\n",
"<td colspan=\"7\"><hr size=\"1\" noshade></td>\n",
"</tr>\n";
}
}
print "</table>\n";

File diff suppressed because it is too large

View File

@@ -1,216 +0,0 @@
// Copyright MetaCommunications, Inc. 2003-2007.
// Copyright Steven Watanabe 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "add_expected_results.hpp"
#include "common.hpp"
#include "xml.hpp"
#include <string>
#include <boost/foreach.hpp>
#include <boost/functional/hash.hpp>
using namespace boost::regression;
bool target_result(const test_structure_t::test_log_t& test_log, const std::string& name) {
boost::unordered_map<std::string, test_structure_t::target_t>::const_iterator pos = test_log.targets.find(name);
if(pos != test_log.targets.end()) {
return pos->second.result;
} else {
return false;
}
}
bool is_test_log_complete(const test_structure_t::test_log_t& test_log) {
// FIXME: The original XSL function is buggy and
// Boost.Build relies on its behavior
return true;
if(test_log.test_type == "compile" || test_log.test_type == "compile_fail" ||
!target_result(test_log, "compile")) {
return test_log.targets.count("compile") == 1 &&
test_log.targets.count("link") == 0 &&
test_log.targets.count("run") == 0;
} else if(test_log.test_type == "link" || test_log.test_type == "link_fail" ||
test_log.test_type == "" || test_log.test_type == "lib" ||
!target_result(test_log, "link")) {
return test_log.targets.count("compile") == 1 &&
test_log.targets.count("link") == 1 &&
test_log.targets.count("run") == 0;
} else if(test_log.test_type == "run" || test_log.test_type == "run_fail" ||
test_log.test_type == "run_pyd" || test_log.test_type == "run_mpi") {
return test_log.targets.count("compile") == 1 &&
test_log.targets.count("link") == 1 &&
test_log.targets.count("run") == 1;
} else {
throw std::runtime_error("Unknown test type " + test_log.test_type);
}
}
std::string get_toolset_name(const std::string& toolset, const expected_results_t& expected_results) {
expected_results_t::toolset_aliases_t::const_iterator pos = expected_results.toolset_aliases.find(toolset);
if(pos != expected_results.toolset_aliases.end()) {
return pos->second;
} else {
return toolset;
}
}
void add_note(test_structure_t::test_log_t& test_log, const std::string& text, const std::string& class_name = "auto-note") {
test_log.notes.push_back("<span class=\"" + class_name + "\">" + text + "</span>");
}
void process_test_log(test_structure_t::test_log_t& test_log,
const failures_markup_t& failures_markup,
const expected_results_t& expected_results,
const std::string& source) {
bool is_complete = is_test_log_complete(test_log);
bool has_failures = false;
typedef boost::unordered_map<std::string, test_structure_t::target_t>::const_reference target_ref;
BOOST_FOREACH(target_ref target, test_log.targets) {
if(!target.second.result) {
has_failures = true;
break;
}
}
bool actual_result = !(has_failures || !is_complete);
std::string toolset_name = get_toolset_name(test_log.toolset, expected_results);
const bool* expected_results_test_case = 0;
{
test_case_t test_id;
test_id.library = test_log.library;
test_id.test_name = test_log.test_name;
test_id.toolset_name = toolset_name;
expected_results_t::tests_t::const_iterator pos = expected_results.tests.find(test_id);
if(pos != expected_results.tests.end()) {
expected_results_test_case = &pos->second;
}
}
std::string category = "0";
node_ptr test_failures_markup = 0;
{
boost::unordered_map<std::string, node_ptr>::const_iterator pos = failures_markup.libraries.find(test_log.library);
if(pos != failures_markup.libraries.end()) {
node_ptr library_markup = pos->second;
FOR_EACH_ELEMENT(elem, library_markup) {
if(check_name(elem, "test")) {
std::string test_name;
if(lookup_attr(elem, "name", test_name) && re_match(test_name, test_log.test_name)) {
lookup_attr(elem, "category", category);
FOR_EACH_ELEMENT(mark_failure, elem) {
FOR_EACH_ELEMENT(toolset, mark_failure) {
std::string toolset_name;
if(lookup_attr(toolset, "name", toolset_name) && re_match(toolset_name, test_log.toolset)) {
test_failures_markup = mark_failure;
goto found_explicit_failure_markup;
}
}
}
}
} else if(check_name(elem, "mark-expected-failures")) {
bool has_test = false;
bool has_toolset = false;
FOR_EACH_ELEMENT(subelem, elem) {
std::string name;
bool has_name = lookup_attr(subelem, "name", name);
if(has_name && check_name(subelem, "test") && re_match(name, test_log.test_name)) {
has_test = true;
} else if(has_name && check_name(subelem, "toolset") && re_match(name, test_log.toolset)) {
has_toolset = true;
}
if(has_toolset && has_test) {
test_failures_markup = elem;
goto found_explicit_failure_markup;
}
}
}
}
}
found_explicit_failure_markup:;
}
bool is_new = (expected_results_test_case == 0);
bool has_explicit_markup = (test_failures_markup != 0);
bool expected_result = !(has_explicit_markup || (expected_results_test_case && !*expected_results_test_case));
bool status = (expected_result == actual_result);
bool unexpected_success = (expected_result == false && actual_result == true);
std::string expected_reason;
lookup_attr(test_failures_markup, "reason", expected_reason);
if(unexpected_success && has_explicit_markup) {
add_note(test_log,
"This test case was explicitly marked up in \n"
"<a href=\"https://github.com/boostorg/boost/blob/" + source + "/status/explicit-failures-markup.xml\">\n"
" status/explicit-failures-markup.xml</a> file in the Boost repository as \"expected to fail\",\n"
"but is passing. Please consult the notes/output below for more details.\n");
}
if(has_explicit_markup && lookup_element(test_failures_markup, "note") == 0) {
if(unexpected_success) {
add_note(test_log,
"No explanation was provided for this markup. Please contact the library \n"
"author(s)/maintainer(s) for more details.\n");
} else {
add_note(test_log,
"This failure was explicitly marked as expected in \n"
"<a href=\"https://github.com/boostorg/boost/blob/" + source + "/status/explicit-failures-markup.xml\">\n"
"status/explicit-failures-markup.xml</a> file in the Boost repository. \n"
"Please contact the library author(s)/maintainer(s) for the explanation of this markup.\n");
}
}
if(node_ptr elem = lookup_element(test_failures_markup, "note")) {
test_log.notes.push_back(elem);
}
if(expected_results_test_case && !*expected_results_test_case) {
if(unexpected_success) {
add_note(test_log,
"This test case used to fail in the reference (\"last-known-good\") release.\n");
} else {
add_note(test_log,
"This failure was present in the reference (\"last-known-good\") release.\n");
}
}
if(!is_complete && !has_failures) {
add_note(test_log,
"<b>[Reporting Tools Internal Error]</b> This test case's XML is missing one or more log entries\n"
"of the regression run's steps associated with the test case's type (\"" + test_log.test_type + "\").\n"
"Please <a href=\"mailto:mailto:boost-testing@lists.boost.org\">contact reporting tools \n"
"maintainers</a> about this problem.\n", "internal-error-note");
}
test_log.result = actual_result;
test_log.expected_result = expected_result;
test_log.expected_reason = expected_reason;
test_log.status = status;
test_log.is_new = is_new;
test_log.category = category;
}
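// Summary of the flag combination above (explanatory comment only, not part of
// the original source): expected_result is false when the test has explicit
// failure markup or a negative entry in the expected-results file; status is
// true ("expected") exactly when expected_result == actual_result; and the
// single case expected_result == false with actual_result == true is the
// unexpected success that triggers the extra notes above.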
// requires: source is a Git branch name
void boost::regression::add_expected_results(
test_structure_t::run_t& tests,
const failures_markup_t& failures_markup,
const expected_results_t& expected_results,
const std::string& source)
{
BOOST_FOREACH(test_structure_t::toolset_group_t::reference toolset, tests.toolsets) {
BOOST_FOREACH(test_structure_t::toolset_t::reference library, toolset.second) {
BOOST_FOREACH(test_structure_t::library_t::reference test_case, library.second) {
BOOST_FOREACH(test_structure_t::test_case_t::reference test_log, test_case.second) {
process_test_log(test_log, failures_markup, expected_results, source);
}
}
}
}
}

View File

@@ -1,26 +0,0 @@
// add_expected_results.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef ADD_EXPECTED_RESULTS_HPP_INCLUDED
#define ADD_EXPECTED_RESULTS_HPP_INCLUDED
#include "xml.hpp"
namespace boost {
namespace regression {
void add_expected_results(
test_structure_t::run_t& tests,
const failures_markup_t& failures_markup,
const expected_results_t& expected_results,
const std::string& source);
}
}
#endif

View File

@@ -1,181 +0,0 @@
// boost_report.cpp
//
// Copyright (c) 2013
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "issues_page.hpp"
#include "links_page.hpp"
#include "result_page.hpp"
#include "issues_page.hpp"
#include "summary_page.hpp"
#include "add_expected_results.hpp"
#include "produce_expected_results.hpp"
#include "runners.hpp"
#include "xml.hpp"
#include <boost/shared_ptr.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <boost/date_time/posix_time/posix_time_types.hpp>
#include <boost/date_time/posix_time/posix_time_io.hpp>
#include <boost/foreach.hpp>
#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <boost/program_options.hpp>
#include <boost/range/algorithm/sort.hpp>
#include <iostream>
#include <fstream>
using namespace boost::regression;
boost::shared_ptr<boost::zip::zip_archive> global_zip;
int main(int argc, char* argv[]) {
boost::program_options::options_description desc;
boost::program_options::variables_map vm;
desc.add_options()
("input-file", boost::program_options::value<std::vector<std::string> >(), "Runner XML files")
("expected,e", boost::program_options::value<std::string>()->required(), "Expected results file")
("markup,m", boost::program_options::value<std::string>()->required(), "Failures markup file")
("tag", boost::program_options::value<std::string>()->required(), "the tag for the results (i.e. 'trunk')")
("run-date", boost::program_options::value<boost::posix_time::ptime>()->default_value(boost::posix_time::second_clock::universal_time()), "the timestamp of the report")
("reports,r", boost::program_options::value<std::vector<std::string> >(), "The reports to generate")
("css", boost::program_options::value<std::string>(), "The CSS file")
("comment", boost::program_options::value<std::string>()->required(), "The report comment file")
("help,h", "produce a help message")
;
boost::program_options::positional_options_description p;
p.add("input-file", -1);
try {
boost::program_options::store(boost::program_options::command_line_parser(argc, argv)
.options(desc).positional(p).run(), vm);
boost::program_options::notify(vm);
boost::posix_time::ptime now = vm["run-date"].as<boost::posix_time::ptime>();
std::string tag = vm["tag"].as<std::string>();
std::set<std::string> reports;
if(vm.count("reports")) {
BOOST_FOREACH(const std::string& report, vm["reports"].as<std::vector<std::string> >())
reports.insert(report);
}
std::vector<std::string> warnings;
test_structure_t structure;
failures_markup_t markup;
expected_results_t expected;
std::vector<test_structure_t::run_t*> runs;
std::cout << "Reading expected results" << std::endl;
boost::shared_ptr<document_type> expected_results = read_xml_file(vm["expected"].as<std::string>().c_str());
load_expected_results(&*expected_results, expected);
std::cout << "Reading failures markup" << std::endl;
boost::shared_ptr<document_type> failures_markup = read_xml_file(vm["markup"].as<std::string>().c_str());
load_failures_markup(&*failures_markup, markup);
std::ofstream zip_file("report.zip", std::ios_base::binary);
zip_file.exceptions(std::ios_base::failbit);
global_zip.reset(new boost::zip::zip_archive(zip_file));
if(vm.count("input-file")) {
std::vector<std::string> input_files = vm["input-file"].as<std::vector<std::string> >();
boost::sort(input_files);
BOOST_FOREACH(const std::string& file, input_files) {
boost::shared_ptr<document_type> test_results;
try {
std::cout << "Reading " << file << std::endl;
test_results = read_xml_file(file.c_str());
load_test_structure(&*test_results, structure, runs);
test_structure_t::run_t* test_run = runs.back();
std::cout << "Merging expected results" << std::endl;
add_expected_results(*test_run, markup, expected, tag);
std::cout << "Generating links pages" << std::endl;
// must be run before test_results is discarded
if(reports.count("l"))
links_page(markup, *test_run);
} catch(std::ios_base::failure& e) {
std::cerr << e.what() << std::endl;
} catch(boost::property_tree::detail::rapidxml::parse_error& e) {
std::cerr << e.what() << std::endl;
}
}
}
std::vector<std::string> modes;
modes.push_back("developer");
modes.push_back("user");
if (reports.count("i") != 0) {
std::cout << "Generating issues page" << std::endl;
issues_list("developer", structure, markup,
true, tag, now, warnings, "");
}
BOOST_FOREACH(const std::string& mode, modes) {
if(reports.count(mode.substr(0, 1) + "d"))
result_page(structure, markup,
false, tag, now, warnings, mode, vm["comment"].as<std::string>());
}
BOOST_FOREACH(const std::string& mode, modes) {
if(reports.count(mode.substr(0, 1) + "s"))
summary_page(mode, tag, now, std::vector<std::string>(),
structure, markup, false);
}
BOOST_FOREACH(const std::string& mode, modes) {
if(reports.count(mode.substr(0, 1) + "dr"))
result_page(structure, markup,
true, tag, now, warnings, mode, vm["comment"].as<std::string>());
}
BOOST_FOREACH(const std::string& mode, modes) {
if(reports.count(mode.substr(0, 1) + "sr"))
summary_page(mode, tag, now, std::vector<std::string>(),
structure, markup, true);
}
if (reports.count("e")) {
produce_expected_results(structure);
}
if(reports.count("n")) {
runners(structure);
}
if(vm.count("css")) {
std::cout << "Writing file master.css" << std::endl;
html_writer css("master.css");
std::string filename = vm["css"].as<std::string>();
std::ifstream input(filename.c_str());
if (input) {
std::string data(std::istreambuf_iterator<char>(input.rdbuf()), std::istreambuf_iterator<char>());
css << data;
} else {
std::cerr << "warning: Could not open file: " << filename << std::endl;
}
}
global_zip.reset();
} catch(boost::program_options::error& e) {
if(vm.count("help")) {
std::cerr << desc << std::endl;
} else {
std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
} catch(boost::exception& e) {
std::cerr << boost::diagnostic_information(e) << std::endl;
return EXIT_FAILURE;
} catch(std::exception& e) {
std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
}
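// Rough usage sketch (the executable name and file names are hypothetical; the
// options are the ones declared in the program_options description above):
//
//   boost_report -e expected_results.xml -m explicit-failures-markup.xml \
//       --tag develop --comment comment.html -r dd -r ds -r l \
//       --css master.css runner1.xml runner2.xml
//
// The runner XML files are positional, the codes given via -r select which
// report pages are generated, and everything is written into report.zip.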

View File

@@ -1,671 +0,0 @@
// Copyright MetaCommunications, Inc. 2003-2005.
// Copyright Steven Watanabe 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "common.hpp"
#include "xml.hpp"
#include "html.hpp"
#include <boost/regex.hpp>
#include <boost/foreach.hpp>
#include <boost/algorithm/string/split.hpp>
#include <boost/algorithm/string/classification.hpp>
#include <boost/date_time/posix_time/posix_time_io.hpp>
#include <algorithm>
#include <set>
using namespace boost::regression;
std::string boost::regression::alternate_mode(const std::string& mode) {
if(mode == "user") {
return "developer";
} else {
return "user";
}
}
std::string boost::regression::release_postfix(bool is_release) {
if(is_release) {
return "_release";
} else {
return "";
}
}
// safe
void boost::regression::get_libraries(const test_structure_t& test_structure, std::set<std::string>& out) {
typedef boost::unordered_map<std::string, test_structure_t::platform_t>::const_reference outer_elem;
BOOST_FOREACH(outer_elem platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
BOOST_FOREACH(test_structure_t::toolset_t::const_reference library, toolset.second) {
out.insert(library.first);
}
}
}
}
}
#if 0
<func:function name="meta:test_case_status">
<xsl:param name="explicit_markup"/>
<xsl:param name="test_log"/>
<xsl:variable name="status">
<xsl:choose>
<xsl:when test="meta:is_unusable( $explicit_markup, $test_log/@library, $test_log/@toolset )">
<xsl:text>unusable</xsl:text>
</xsl:when>
<xsl:when test="$test_log/@result='fail' and $test_log/@status='unexpected' and $test_log/@is-new='no'">
<xsl:text>fail-unexpected</xsl:text>
</xsl:when>
<xsl:when test="$test_log/@result='fail' and $test_log/@status='unexpected' and $test_log/@is-new='yes'">
<xsl:text>fail-unexpected-new</xsl:text>
</xsl:when>
<xsl:when test="$test_log/@result='success' and $test_log/@status='unexpected'">
<xsl:text>success-unexpected</xsl:text>
</xsl:when>
<xsl:when test="$test_log/@status='expected'">
<xsl:text>expected</xsl:text>
</xsl:when>
<xsl:otherwise>
<xsl:text>other</xsl:text>
</xsl:otherwise>
</xsl:choose>
</xsl:variable>
<func:result select="$status"/>
</func:function>
<func:function name="meta:is_toolset_required">
<xsl:param name="toolset"/>
<func:result select="count( $explicit_markup/explicit-failures-markup/mark-toolset[ @name = $toolset and @status='required' ] ) > 0"/>
</func:function>
#endif
bool boost::regression::is_library_beta(const failures_markup_t& explicit_markup, const std::string& library) {
boost::unordered_map<std::string, node_ptr>::const_iterator pos = explicit_markup.libraries.find(library);
if(pos != explicit_markup.libraries.end()) {
return check_attr(pos->second, "status", "beta");
}
return false;
}
bool boost::regression::is_test_log_a_test_case(const test_structure_t::test_log_t& test_log) {
const std::string& type = test_log.test_type;
return type == "compile" || type == "compile_fail" || type == "link" || type == "link_fail" ||
type == "run" || type == "run_fail" || type == "run_pyd" || type == "run_mpi";
}
// Does not assume any constraints on contents of the strings
bool boost::regression::is_unusable(const failures_markup_t& markup, const std::string& library, const std::string& toolset) {
boost::unordered_map<std::string, node_ptr>::const_iterator pos = markup.libraries.find(library);
if(pos != markup.libraries.end()) {
FOR_EACH_ELEMENT(mark_unusable, pos->second) {
if(check_name(mark_unusable, "mark-unusable")) {
FOR_EACH_ELEMENT(toolset_node, mark_unusable) {
std::string name;
if(lookup_attr(toolset_node, "name", name) && re_match(name, toolset)) {
return true;
}
}
}
}
}
return false;
}
void boost::regression::get_unusable(const failures_markup_t& markup,
const std::string& library,
const test_structure_t& test_structure,
boost::unordered_map<std::string, std::size_t>& out,
std::vector<node_ptr>& notes) {
boost::unordered_map<std::string, node_ptr>::const_iterator pos = markup.libraries.find(library);
if(pos != markup.libraries.end()) {
FOR_EACH_ELEMENT(mark_unusable, pos->second) {
if(check_name(mark_unusable, "mark-unusable")) {
node_ptr note = 0;
std::vector<std::string> toolsets;
FOR_EACH_ELEMENT(toolset_node, mark_unusable) {
std::string name;
if(check_name(toolset_node, "toolset") && lookup_attr(toolset_node, "name", name)) {
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
if(re_match(name, toolset.first)) {
toolsets.push_back(toolset.first);
}
}
}
}
} else if(check_name(toolset_node, "note")) {
note = toolset_node;
}
}
if(note != 0 && !toolsets.empty()) {
notes.push_back(note);
BOOST_FOREACH(const std::string& toolset, toolsets) {
out[toolset] = notes.size();
}
}
}
}
}
}
// There are no restrictions on the pattern or the
// string. The only special character in the pattern
// is '*', which matches any number of consecutive characters.
bool boost::regression::re_match(const std::string& pattern, const std::string& text) {
std::size_t pattern_start = 0;
std::size_t pattern_end = 0;
std::size_t text_start = 0;
// check that the leading portion of the string matches
std::size_t first = pattern.find('*');
if(first == std::string::npos) return pattern == text;
if(pattern.substr(0, first) != text.substr(0, first)) return false;
text_start = first;
pattern_start = pattern_end = first + 1;
for(; pattern_end != pattern.size(); ++pattern_end) {
// split into blocks at '*'
if(pattern[pattern_end] == '*') {
// and search for each block
std::size_t size = pattern_end - pattern_start;
std::size_t off = text.find(pattern.data() + pattern_start, text_start, size);
// if not found, the pattern doesn't match
if(off == std::string::npos) return false;
text_start = off + size;
pattern_start = pattern_end + 1; // skip past the '*'
}
}
// check that the tails of the strings are the same
std::size_t tail_size = pattern_end - pattern_start;
return tail_size <= text.size() - text_start &&
pattern.substr(pattern_start, tail_size) == text.substr(text.size() - tail_size, tail_size);
}
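// A few illustrative cases for the '*' matching above (comment only; the
// toolset names are made up): re_match("gcc-*", "gcc-4.9") and
// re_match("*-darwin", "clang-darwin") hold, re_match("msvc", "msvc-12.0")
// does not (no '*', so an exact match is required), and a lone "*" matches
// any text.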
// date-time
// The result is clamped to the range [0,30]
int boost::regression::timestamp_difference(const boost::posix_time::ptime& x, const boost::posix_time::ptime& y) {
boost::posix_time::time_duration diff = y - x;
int result = diff.hours() / 24;
if(result < 0) return 0;
else if(result > 30) return 30;
else return result;
}
std::string boost::regression::format_timestamp(const boost::posix_time::ptime& timestamp) {
std::ostringstream stream;
stream.imbue(std::locale(std::locale::classic(), new boost::posix_time::time_facet("%a, %d %b %Y %H:%M:%S +0000")));
stream << timestamp;
return stream.str();
}
// path
// FIXME: The result MUST be a valid filesystem path.
std::string boost::regression::encode_path(const std::string& path) {
std::string result;
BOOST_FOREACH(char ch, path) {
if(ch == '.' || ch == '/') {
ch = '-';
}
// FIXME: allow only characters from the following set:
// "[a-z][A-Z][0-9][-+_. ,()$!~?]...
result += ch;
}
return result;
}
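// For example (comment added for clarity), encode_path("tools/inspect/build")
// becomes "tools-inspect-build" and encode_path("gcc-4.9") becomes "gcc-4-9";
// only '.' and '/' are rewritten to '-'.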
std::string boost::regression::escape_uri(const std::string& path) {
std::string result;
BOOST_FOREACH(char ch, path) {
if (('a' <= ch && ch <= 'z') ||
('A' <= ch && ch <= 'Z') ||
('0' <= ch && ch <= '9') ||
ch == '-' || ch == '_' || ch == '~' || ch == '.' ||
// We're intentionally allowing '/' to go through.
// to escape it as well, use escape_literal_uri
ch == '/' ||
// FIXME: reserved characters
ch == '+')
result += ch;
else {
unsigned digit = static_cast<unsigned char>(ch); // mask the byte so the xdigits lookups below stay in range
const char * xdigits = "0123456789ABCDEF";
result += '%';
result += xdigits[digit >> 4];
result += xdigits[digit & 0xF];
}
}
return result;
}
std::string boost::regression::escape_literal_uri(const std::string& path) {
std::string result;
BOOST_FOREACH(char ch, path) {
// FIXME: Assumes UTF-8
if (('a' <= ch && ch <= 'z') ||
('A' <= ch && ch <= 'Z') ||
('0' <= ch && ch <= '9') ||
ch == '-' || ch == '_' || ch == '~' || ch == '.')
result += ch;
else {
unsigned digit = static_cast<unsigned char>(ch); // mask the byte so the xdigits lookups below stay in range
const char * xdigits = "0123456789ABCDEF";
result += '%';
result += xdigits[digit >> 4];
result += xdigits[digit & 0xF];
}
}
return result;
}
// okay
std::string boost::regression::output_file_path(const std::string& path) {
return("output/" + (encode_path(path) + ".html"));
}
// okay
std::string boost::regression::log_file_path(
const failures_markup_t& explicit_markup,
const test_structure_t::test_log_t& test_log,
const std::string& runner,
const std::string& release_postfix)
{
if(show_output(explicit_markup, test_log)) {
return output_file_path(runner + "-" + test_log.target_directory + release_postfix);
} else {
return "";
}
}
bool boost::regression::show_library(const failures_markup_t& explicit_markup, const std::string& library, bool release) {
return !release || !is_library_beta(explicit_markup, library);
}
bool boost::regression::show_output(const failures_markup_t& explicit_markup, const test_structure_t::test_log_t& test_log) {
return ((!test_log.result || test_log.show_run_output) ||
(test_log.result && !test_log.status))
&& !(is_unusable(explicit_markup, test_log.library, test_log.toolset));
}
bool boost::regression::show_toolset(const failures_markup_t& explicit_markup, const std::string& toolset, bool release) {
return !release || explicit_markup.required_toolsets.find(toolset) != explicit_markup.required_toolsets.end();
}
// safe: no assumptions, enumerated result
std::string boost::regression::result_cell_class(const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_log_group_t& test_logs) {
if(is_unusable(explicit_markup, library, toolset)) {
return "unusable";
}
if(test_logs.empty()) {
return "missing";
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->expected_result && !log->is_new) {
return "fail-unexpected";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->expected_result && log->is_new) {
return "fail-unexpected-new";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->expected_reason != "") {
return "fail-expected-unreasearched";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result) {
return "fail-expected";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(log->result && !log->expected_result) {
return "success-unexpected";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(log->result && log->expected_result) {
return "success-expected";
}
}
return "unknown";
}
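// Priority of the cell classes returned above (explanatory comment only):
// unusable and missing are checked first, then the failure states from worst
// to mildest (fail-unexpected, fail-unexpected-new, fail-expected-unreasearched,
// fail-expected), then the success states (success-unexpected,
// success-expected), with "unknown" as the fallback.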
// safe
std::string boost::regression::result_cell_class(const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_structure_t::library_t& test_logs)
{
test_log_group_t tmp;
BOOST_FOREACH(test_structure_t::library_t::const_reference test_case, test_logs) {
BOOST_FOREACH(test_structure_t::test_case_t::const_reference log, test_case.second) {
tmp.push_back(&log);
}
}
return result_cell_class(explicit_markup, library, toolset, tmp);
}
// requires: purpose must be well-formed html
void boost::regression::insert_report_header(
html_writer& document,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose)
{
document << "<div class=\"report-info\">\n";
document << " <div>\n";
document << " <b>Report Time: </b> " << format_timestamp(run_date) << "\n";
document << " </div>\n";
if(!purpose.empty()) {
document << " <div>\n";
document << " <b>Purpose: </b> " << purpose << "\n";
document << " </div>\n";
}
BOOST_FOREACH(const std::string& warning, warnings) {
document << " <div class=\"report-warning\">\n";
document << " <b>Warning: </b>\n";
document << " <a href=\"mailto:boost-testing@lists.boost.org?subject=[Report Pages]%20" << escape_literal_uri(warning) << " (" << format_timestamp(run_date) << ")\" class=\"warning-link\">\n";
document << " " << escape_xml(warning) << "\n";
document << " </a>\n";
document << " </div>\n";
}
document << "</div>\n";
}
// requires class_ is enumerated
void boost::regression::insert_view_link(html_writer& out, const std::string& page, const std::string& class_, bool release) {
if(release) {
out << "<a href=\"" << escape_uri(page) << ".html\" class=\"" << class_ << "\" target=\"_top\">"
"Full View"
"</a>\n";
} else {
out << "<a href=\"" << escape_uri(page) << "_release.html\" class=\"" << class_ << "\" target=\"_top\">"
"Release View"
"</a>";
}
}
// requires: mode = developer | user (Should be the opposite of the current page)
// requires: page is the base name of the current page. It should be valid
// according to encode_path, but should not be URI escaped.
void boost::regression::insert_page_links(html_writer& document,
const std::string& page,
bool release,
const std::string& mode) {
document << "<div class=\"links\">\n";
// yes, really. The class is set to ""
insert_view_link(document, page, "", release);
std::string release_postfix(release? "_release" : "");
document << "&#160;|&#160;"
"<a href=\"../" << mode << "/" << escape_uri(page) << release_postfix << ".html\" class=\"view-link\" target=\"_top\">"
<< mode << " View"
"</a>"
"&#160;|&#160;"
"<a href=\"" << escape_uri(page) << release_postfix << "_.html#legend\">Legend</a>\n"
"</div>\n";
}
// requires: mode = summary | details
// requires: top_or_bottom = top | bottom
void boost::regression::insert_runners_rows(html_writer& document,
const std::string& mode,
const std::string& top_or_bottom,
const test_structure_t& test_structure,
const boost::posix_time::ptime& run_date) {
std::string colspan = (mode == "summary") ? "1" : "2";
if(top_or_bottom == "top") {
document << "<tr>\n"
" <td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
std::size_t count = 0;
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
count += run.toolsets.size();
}
if(count > 0) {
document << " <td colspan=\"" << count << "\" class=\"runner\">\n"
" " << escape_xml(platform.first) << "\n"
" </td>\n";
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
}
document << "<tr>\n"
" <td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
if(run.toolsets.size() > 0) {
document << " <td colspan=\"" << run.toolsets.size() << "\" class=\"runner\">\n"
" <a href=\"../" << escape_uri(encode_path(run.runner)) << ".html\">\n"
" " << escape_xml(run.runner) << "\n"
" </a>\n"
" </td>\n";
}
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
document << "<tr>\n"
"<td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
if(run.toolsets.size() > 0) {
document << " <td colspan=\"" << run.toolsets.size() << "\" class=\"revision\">\n"
" rev " << run.revision.substr(0, 6) << "\n"
" </td>\n";
}
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
document << "<tr>\n"
" <td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
if(run.toolsets.size() > 0) {
int age = timestamp_difference(run.timestamp, run_date);
document << " <td colspan=\"" << run.toolsets.size() << "\" class=\"timestamp\">\n"
" <span class=\"timestamp-" << age << "\">" << format_timestamp(run.timestamp) << "</span>";
if(run.run_type != "full") {
document << "<span class=\"run-type-" << run.run_type << "\">" << run.run_type[0] << "</span>\n";
}
document << " </td>\n";
}
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
if(top_or_bottom == "bottom") {
document << "<tr>\n"
" <td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
std::size_t count = 0;
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
count += run.toolsets.size();
}
if(count > 0) {
document << " <td colspan=\"" << count << "\" class=\"runner\">\n"
" " << escape_xml(platform.first) << "\n"
" </td>\n";
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
}
}
// requires mode = summary | details
void boost::regression::insert_toolsets_row(html_writer& document,
const test_structure_t& test_structure,
const failures_markup_t& explicit_markup,
const std::string& mode,
const boost::posix_time::ptime& run_date,
const std::string& library,
const boost::unordered_map<std::string, std::size_t>& library_marks) {
document << "<tr valign=\"middle\">\n";
std::string colspan = (mode == "summary") ? "1" : "2";
std::string title = (mode == "summary") ?
"&#160;library&#160;/&#160;toolset&#160;" :
"&#160;test&#160;/&#160;toolset&#160;";
document << " <td class=\"head\" colspan=\"" << colspan << "\" width=\"1%\">" << title << "</td>\n";
BOOST_FOREACH(const test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(const test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(const test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
std::string name = toolset.first;
std::string class_ = (explicit_markup.required_toolsets.find(name) != explicit_markup.required_toolsets.end())?
"required-toolset-name" :
"toolset-name";
document << "<td class=\"" << class_ << "\">\n";
int age = timestamp_difference(run.timestamp, run_date);
document << "<span class=\"timestamp-" << age << "\">\n";
// break toolset names into words
BOOST_FOREACH(char ch, name) {
document << ch;
if(ch == '-') {
document << ' ';
}
}
if(mode == "details") {
// <!-- prepare toolset notes -->
std::set<std::size_t> toolset_notes;
typedef boost::unordered_map<std::string, std::size_t>::const_reference ref_type;
BOOST_FOREACH(ref_type toolset_markup, library_marks.equal_range(name)) {
toolset_notes.insert(toolset_markup.second);
}
if(!toolset_notes.empty()) {
document << "<span class=\"super\">\n";
bool first = true;
BOOST_FOREACH(std::size_t note_index, toolset_notes) {
if(!first) document << ", "; else first = false;
document << "<a href=\"#" << escape_uri(library) << "-note-" << note_index << "\" title=\"Note " << note_index << "\">\n"
" " << note_index << "\n"
"</a>\n";
}
document << "</span>\n";
}
}
document << "</span>\n"
"</td>\n";
}
}
}
document << "<td class=\"head\" width=\"1%\">" << title << "</td>\n"
"</tr>\n";
}
namespace {
std::string get_note_attr(const test_structure_t::note_t& note, const std::string& name) {
if(const node_ptr* node = boost::get<node_ptr>(&note)) {
std::string result;
lookup_attr(*node, name, result);
return result;
} else {
return std::string();
}
}
}
// requires: if note is a string, it is well-formed html
void boost::regression::show_note(
html_writer& document,
const test_structure_t::note_t& note,
const std::string& references,
const failures_markup_t& explicit_markup)
{
document << "<div class=\"note\">\n";
std::string author = get_note_attr(note, "author");
std::string date = get_note_attr(note, "date");
document << " <span class=\"note-header\">\n";
if(author != "" && date != "") {
document << " [&#160;" << escape_xml(author) << "&#160;" << escape_xml(date) << "&#160;]\n";
} else if(author != "") {
document << " [&#160;" << escape_xml(author) << "&#160;]\n";
} else if(date != "") {
document << " [&#160;" << escape_xml(date) << "&#160;]\n";
}
document << " </span>\n";
if(references != "") {
// lookup references (refid="17,18")
std::vector<std::string> refs;
boost::algorithm::split(refs, references, boost::is_any_of(","));
BOOST_FOREACH(const std::string& refid, refs) {
boost::unordered_map<std::string, node_ptr>::const_iterator pos = explicit_markup.notes.find(refid);
if(pos != explicit_markup.notes.end()) {
write_contents(document, pos->second);
} else {
document << " " << escape_xml(refid) << "\n";
}
}
}
if(const node_ptr* node_note = boost::get<node_ptr>(&note)) {
write_contents(document, *node_note);
} else if(const std::string* string_note = boost::get<std::string>(&note)) {
document << *string_note; // not escaped--can contain html markup
}
document << "</div>\n";
}
// requires: any note that is a string contains well-formed html
void boost::regression::show_notes(html_writer& document,
const std::vector<test_structure_t::note_t>& notes,
const failures_markup_t& explicit_markup)
{
document << "<div class=\"notes\">\n";
BOOST_FOREACH(const test_structure_t::note_t& note, notes) {
document << " <div>\n";
std::string refid = get_note_attr(note, "refid");
::show_note(document, note, refid, explicit_markup);
document << " </div>\n";
}
document << "</div>\n";
}

View File

@@ -1,109 +0,0 @@
// common.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef COMMON_HPP_INCLUDED
#define COMMON_HPP_INCLUDED
#include <vector>
#include <string>
#include <set>
#include <boost/filesystem/path.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include "xml.hpp"
namespace boost {
namespace regression {
class html_writer;
typedef std::vector<const test_structure_t::test_log_t*> test_log_group_t;
bool is_library_beta(const failures_markup_t& explicit_markup, const std::string& library);
bool is_test_log_a_test_case(const test_structure_t::test_log_t& test_log);
bool is_unusable(const failures_markup_t& markup, const std::string& library, const std::string& toolset);
void get_unusable(const failures_markup_t& markup,
const std::string& library,
const test_structure_t& test_structure,
boost::unordered_map<std::string, std::size_t>& out,
std::vector<node_ptr>& notes);
bool re_match(const std::string& pattern, const std::string& text);
int timestamp_difference(const boost::posix_time::ptime& x, const boost::posix_time::ptime& y);
std::string format_timestamp(const boost::posix_time::ptime& timestamp);
std::string encode_path(const std::string& path);
std::string escape_uri(const std::string& path); // escapes a URI path (leaves '/' alone)
std::string escape_literal_uri(const std::string& path); // escapes all special characters in a URI
std::string output_file_path(const std::string& path);
std::string log_file_path(
const failures_markup_t& explicit_markup,
const test_structure_t::test_log_t& test_log,
const std::string& runner,
const std::string& release_postfix = "");
bool show_library(const failures_markup_t& explicit_markup, const std::string& library, bool release);
bool show_output(const failures_markup_t& markup, const test_structure_t::test_log_t& test_log);
bool show_toolset(const failures_markup_t& explicit_markup, const std::string& toolset, bool release);
void insert_report_header(html_writer& document,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose = "");
void insert_view_link(html_writer& document,
const std::string& page,
const std::string& class_,
bool release);
void insert_page_links(html_writer& document,
const std::string& page,
bool release,
const std::string& mode);
void insert_runners_rows(html_writer& document,
const std::string& mode,
const std::string& top_or_bottom,
const test_structure_t& test_structure,
const boost::posix_time::ptime& run_date);
void insert_toolsets_row(html_writer& document,
const test_structure_t& test_structure,
const failures_markup_t& explicit_markup,
const std::string& mode,
const boost::posix_time::ptime& run_date,
const std::string& library = std::string(),
const boost::unordered_map<std::string, std::size_t>& library_marks = boost::unordered_map<std::string, std::size_t>());
void show_note(
html_writer& document,
const test_structure_t::note_t& note,
const std::string& references,
const failures_markup_t& explicit_markup);
void show_notes(html_writer& document, const std::vector<test_structure_t::note_t>& notes, const failures_markup_t& explicit_markup);
std::string result_cell_class(const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_log_group_t& test_logs);
std::string result_cell_class(const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_structure_t::library_t& test_logs);
std::string alternate_mode(const std::string& mode);
std::string release_postfix(bool is_release);
void get_libraries(const test_structure_t& test_structure, std::set<std::string>& out);
}
}
#endif
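// A minimal usage sketch (not part of the original header), assuming a caller
// that already has a failures_markup_t `markup`, a test_log_t `log`, and a
// runner id `runner`; it mirrors how print_failure_cell and links_page combine
// these helpers elsewhere in this commit:
//
//   std::string href = boost::regression::escape_uri(
//       boost::regression::log_file_path(markup, log, runner));
//   // href can then be emitted inside an <a> tag by an html_writer.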

View File

@@ -1,330 +0,0 @@
// html.cpp
//
// Copyright (c) 2010
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "html.hpp"
const char* const boost::regression::issues_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
"<tr>\n"
" <td>\n"
" <table border=\"0\" summary=\"legend\">\n"
" <tr>\n"
" <td>\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected-new\">&lt;toolset&gt;</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-item\">Failure on a newly added test/compiler.</td>\n"
" </tr>\n"
" <tr>\n"
" <td>\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected\">&lt;toolset&gt;</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-item\">Unexpected failure.</td>\n"
" </tr>\n"
" </table>\n"
" </td>\n"
"</tr>\n"
"</table>\n"
"</div>\n"
;
const char* const boost::regression::library_user_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"success legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-success-expected user-library-success-expected\">pass</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">The test successfully passes.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"expected fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-expected user-library-fail-expected\"><u>fail*</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" A <b>known failure</b> that the library maintainers are aware about. Please follow the link to \n"
" find out how it affects the library's functionality.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unusable legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-unusable user-library-unusable\">unusable</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" The library author marked it as <b>unusable</b> on this particular platform/toolset. Please\n"
" see the corresponding footnote.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unresearched legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-expected-unresearched user-library-fail-expected-unresearched\"><u>fail?</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" An <b>unsearched failure</b>: the library maintainers are aware of it, but need help with \n"
" investigating/addressing it for future releases. Please follow the link to \n"
" access the details and find out how it affects library functionality. </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected-new user-library-fail-unexpected-new\"><u>fail</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" A <b>new failure</b> on the test/compiler added in this release that hasn't been accounted for yet. \n"
" Please follow the link to access the details.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"library-row-single user-library-row-single\"><td class=\"library-fail-unexpected\"><u>fail</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" A <b>regression</b> comparing to the previous release. Please follow the link to \n"
" access the details.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" </td>\n"
" <td class=\"legend-explanation\"></td>\n"
" </tr>\n"
"</table>\n"
"<hr/>\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td><span class=\"run-type-incremental\">i</span></td>\n"
" <td class=\"legend-explanation\">An incremental run.</td>\n"
" </tr>\n"
"</table>\n"
"</div>\n"
;
const char* const boost::regression::library_developer_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"success legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-success-expected\">pass</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Success.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected pass legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-success-unexpected\">pass</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Unexpected success; follow the link for more details.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"expected fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-expected\">fail*</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Expected failure; follow the link for more details.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unusable legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-unusable\">n/a</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">The library author marked it as unusable on this particular platform/toolset.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unresearched legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-expected-unresearched\">fail?</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Unsearched failure; follow the link for more details.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected-new\">fail</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Failure on a newly added test/compiler.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected\">fail</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Unexpected failure/regression.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" </td>\n"
" <td class=\"legend-explanation\"></td>\n"
" </tr>\n"
"</table>\n"
"<hr/>\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td><span class=\"run-type-incremental\">i</span></td>\n"
" <td class=\"legend-explanation\">An incremental run.</td>\n"
" </tr>\n"
"</table>\n"
"</div>\n"
;
const char * const boost::regression::summary_user_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"success legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-success-expected user-summary-success-expected\">&#160;pass&#160;</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" All library's tests pass.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"expected fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-expected user-summary-fail-expected\"><u>details</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" Most of the library's tests pass, but there are some <b>known failures</b> which might affect the library's\n"
" functionality. Please follow the link to see the detailed report.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-unexpected-new user-summary-fail-unexpected-new\"><u>details</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" Some of the <b>newly added</b> library's tests fail, or some of the library's tests fail on\n"
" the <b>newly added compiler</b>, or some of the tests fail due to <b>unresearched \n"
" reasons</b>. Please follow the link to see the detailed report.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-unexpected user-summary-fail-unexpected\"><u>regress.</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" There are some <b>regressions</b> in the library comparing to the previous release. \n"
" Please follow the link to see the detailed report.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unusable legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-unusable user-summary-unusable\">unusable</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" The library author marked it as <b>unusable</b> on the particular platform/toolset.\n"
" Please follow the link to see the detailed report.\n"
" </td>\n"
" </tr>\n"
"</table>\n"
"<hr/>\n"
"<table border=\"0\" summary=\"report description\" id=\"Table1\">\n"
" <tr>\n"
" <td><span class=\"run-type-incremental\">i</span></td>\n"
" <td class=\"legend-explanation\">An incremental run.</td>\n"
" </tr>\n"
"</table>\n"
"</div>\n"
;
const char * const boost::regression::summary_developer_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"success legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-success-expected\">OK</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" All expected tests pass.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected pass legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-success-unexpected\">OK</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" All expected tests pass, and some other tests that were expected to fail \n"
" unexpectedly pass as well.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-unexpected-new\">fail</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" There are some failures on the newly added tests/compiler(s).\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-unexpected\">broken</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" Tests that the library author expects to pass are currently failing.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unusable legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-unusable\">n/a</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" The library author marked it as unusable on particular platform/toolset.\n"
" </td>\n"
" </tr>\n"
"</table>\n"
"<hr/>\n"
"<table border=\"0\" summary=\"report description\" id=\"Table1\">\n"
" <tr>\n"
" <td><span class=\"run-type-incremental\">i</span></td>\n"
" <td class=\"legend-explanation\">An incremental run.</td>\n"
" </tr>\n"
"</table>\n"
"</div>\n"
;

View File

@@ -1,25 +0,0 @@
// html.hpp
//
// Copyright (c) 2010
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef HTML_HPP_INCLUDED
#define HTML_HPP_INCLUDED
namespace boost {
namespace regression {
extern const char* const issues_legend;
extern const char* const library_user_legend;
extern const char* const library_developer_legend;
extern const char* const summary_user_legend;
extern const char* const summary_developer_legend;
}
}
#endif

View File

@@ -1,70 +0,0 @@
// html_writer.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef HTML_WRITER_HPP_INCLUDED
#define HTML_WRITER_HPP_INCLUDED
#include <boost/filesystem/path.hpp>
#include <boost/filesystem/convenience.hpp>
#include <boost/iostreams/device/file_descriptor.hpp>
#include <boost/noncopyable.hpp>
#include <boost/shared_ptr.hpp>
#include "zip.hpp"
#include <cstring>
extern boost::shared_ptr<boost::zip::zip_archive> global_zip;
namespace boost {
namespace regression {
class html_writer : boost::noncopyable {
public:
// path must be UTF-8 encoded. The separator is '/'
explicit html_writer(const std::string& path)
: sink(*global_zip, path)
{}
~html_writer() {
}
html_writer& operator<<(const std::string& arg) {
sink.write(arg.data(), arg.size());
return *this;
}
html_writer& operator<<(const char* arg) {
sink.write(arg, ::std::strlen(arg));
return *this;
}
html_writer& operator<<(char arg) {
sink.write(&arg, 1);
return *this;
}
html_writer& operator<<(std::size_t arg) {
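// Format the integer without heap allocation: digits are written
// right-to-left into a stack buffer, then flushed with a single write.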
char buf[30];
char* pos = &buf[0] + 30;
if(arg == 0) {
*--pos = '0';
}
for(; arg > 0; arg /= 10) {
*--pos = static_cast<char>('0' + (arg % 10));
}
sink.write(pos, buf + 30 - pos);
return *this;
}
html_writer& operator<<(int arg) {
if(arg < 0) *this << '-' << std::size_t(-arg);
else *this << std::size_t(arg);
return *this;
}
private:
boost::zip::nocompression_sink sink;
};
}
}
#endif
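// A minimal usage sketch (not part of the original header). It assumes the
// report driver has already reset `global_zip` to an open archive; the page
// path and content here are illustrative only:
//
//   boost::regression::html_writer page("developer/index.html");
//   page << "<html><body>tests run: " << std::size_t(42) << "</body></html>\n";
//
// Each writer streams through a nocompression_sink, so pages become entries
// of the shared zip archive rather than separate files on disk.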

View File

@@ -1,294 +0,0 @@
// issues_page.cpp
//
// Copyright MetaCommunications, Inc. 2003-2004.
// Copyright Steven Watanabe 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "issues_page.hpp"
#include "html_writer.hpp"
#include "xml.hpp"
#include "html.hpp"
#include "common.hpp"
#include <iostream>
#include <map>
#include <string>
#include <boost/foreach.hpp>
#include <boost/filesystem/fstream.hpp>
#include <boost/date_time/posix_time/posix_time_io.hpp>
using namespace boost::regression;
typedef std::pair<const test_structure_t::test_log_t*, const std::string*> test_failure_t;
typedef std::map<std::string, std::map<std::string, std::vector<test_failure_t> > > library_test_names_t;
typedef std::map<std::string, library_test_names_t> libraries_t;
namespace {
void failing_tests(const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
libraries_t& out)
{
typedef boost::unordered_map<std::string, test_structure_t::platform_t> test_structure_top;
BOOST_FOREACH(test_structure_top::const_reference platform, tests.platforms) {
BOOST_FOREACH(const test_structure_t::run_t& runs, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, runs.toolsets) {
BOOST_FOREACH(test_structure_t::toolset_t::const_reference library, toolset.second) {
BOOST_FOREACH(test_structure_t::library_t::const_reference test_case, library.second) {
BOOST_FOREACH(test_structure_t::test_case_t::const_reference test_log, test_case.second) {
if(test_log.status == false && test_log.result == false &&
explicit_markup.required_toolsets.find(test_log.toolset) != explicit_markup.required_toolsets.end() &&
is_test_log_a_test_case(test_log) &&
show_library(explicit_markup, library.first, release) &&
show_toolset(explicit_markup, toolset.first, release) &&
!(is_unusable(explicit_markup, library.first, toolset.first))) {
out[library.first][test_log.test_name][test_log.toolset].push_back(std::make_pair(&test_log, &runs.runner));
}
}
}
}
}
}
}
}
std::size_t count_failures(const library_test_names_t& library) {
std::size_t result = 0;
BOOST_FOREACH(library_test_names_t::const_reference test, library) {
BOOST_FOREACH(library_test_names_t::mapped_type::const_reference toolset, test.second) {
result += toolset.second.size();
}
}
return result;
}
// okay
void write_issues_list_reference_file(const std::string& out,
const std::string& source,
bool release,
const std::string& issues_list)
{
html_writer document(out);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n";
document << "<html>\n";
document << " <head>\n";
document << " <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n";
document << " <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n";
document << " <title>Boost regression unresolved issues: " << source << "</title>\n";
document << " </head>\n";
document << " <frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n";
document << " <frame name=\"tocframe\" src=\"toc" << release_postfix(release) << ".html\" scrolling=\"auto\"/>\n";
document << " <frame name=\"docframe\" src=\"" << issues_list << "\" scrolling=\"auto\"/>\n";
document << " </frameset>\n";
document << "</html>\n";
}
void print_failure_cell(html_writer& document,
const failures_markup_t& explicit_markup,
const std::string& output_directory,
const test_structure_t::test_log_t& test_log,
const std::string& toolset,
const std::string& runner,
const std::string& release_postfix)
{
std::string log_link = log_file_path(explicit_markup, test_log, runner, release_postfix);
const char* class_ = test_log.is_new?
"library-fail-unexpected-new" :
"library-fail-unexpected";
document << "<td class=\"" << class_ << "\">\n";
document << " <span>\n";
document << " <a href=\"" << escape_uri(log_link) << "\" class=\"log-link\" target=\"_top\">\n";
document << " " << escape_xml(toolset) << "\n";
document << " </a>\n";
document << " </span>\n";
document << "</td>\n";
}
void write_issues_list(const std::string& path,
const failures_markup_t& explicit_markup,
const std::string& output_directory,
const libraries_t& libraries,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose,
bool release)
{
//utils::log("Writing document " + path);
const char* release_postfix = release? "_release" : "";
html_writer document(path);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n";
document << "<html>\n";
document << " <head>\n";
document << " <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n";
document << " <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n";
document << " <title>Boost regression unresolved issues: " << source << "</title>\n";
document << " </head>\n";
document << " <body>\n";
document << "\n";
document << " <h1 class=\"page-title\">\n";
document << " Unresolved Issues: \n";
document << " <a class=\"hover-link\" href=\"summary" << release_postfix << ".html\" target=\"_top\">" << source << "</a>\n";
document << " </h1>\n";
document << "\n";
insert_report_header(document, run_date, warnings, purpose);
// Emit the index
document << " <h2>Libraries with unresolved failures</h2>\n";
document << " <div align=\"center\">\n";
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
document << " <a href=\"#" << escape_uri(library.first) << "\">\n";
document << " " << escape_xml(library.first) << "\n";
document << " </a>\n";
}
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
std::string library_page(encode_path(library.first));
const library_test_names_t& library_test_names(library.second);
std::size_t failures = count_failures(library.second);
document << " <h2>\n";
document << " <a name=\"" << escape_uri(library.first) << "\"/>\n";
document << " <a class=\"hover-link\" href=\"" << escape_uri(library_page)
<< release_postfix << ".html\" target=\"_top\">\n";
document << " " << escape_xml(library.first) << " (" << failures
<< " failure" << (failures == 1? "":"s") << ")\n";
document << " </a>\n";
document << " </h2>\n";
document << " <table class=\"library-issues-table\" summary=\"issues\">\n";
document << " <thead>\n";
document << " <tr valign=\"middle\">\n";
document << " <td class=\"head\">test</td>\n";
document << " <td class=\"head\">failures</td>\n";
document << " </tr>\n";
document << " </thead>\n";
document << " <tfoot>\n";
document << " <tr valign=\"middle\">\n";
document << " <td class=\"head\">test</td>\n";
document << " <td class=\"head\">failures</td>\n";
document << " </tr>\n";
document << " </tfoot>\n";
document << " <tbody>\n";
BOOST_FOREACH(library_test_names_t::const_reference test, library_test_names) {
const std::string& test_name = test.first;
const std::string& test_program = test.second.begin()->second.front().first->test_program;
document << " <tr>\n";
document << " <td class=\"test-name\">\n";
document << " <a href=\"https://github.com/boostorg/boost/blob/"
<< source << "/" << escape_uri(test_program) << "\" class=\"test-link\" target=\"_top\">\n";
document << " " << escape_xml(test_name) << "\n";
document << " </a>\n";
document << " </td>\n";
document << " <td class=\"failures-row\">\n";
document << " <table summary=\"unexpected fail legend\" class=\"issue-box\">\n";
document << " <tr class=\"library-row-single\">\n";
typedef library_test_names_t::mapped_type::const_reference toolset_t;
BOOST_FOREACH(toolset_t toolset, test.second) {
BOOST_FOREACH(const test_failure_t& failure, toolset.second) {
print_failure_cell(document, explicit_markup, output_directory, *failure.first, toolset.first, *failure.second, release_postfix);
}
}
document << " </tr>\n";
document << " </table>\n";
document << " </td>\n";
document << " </tr>\n";
}
document << " </tbody>\n";
document << " </table>\n";
}
document << " </div>\n";
document << issues_legend;
document << " </body>\n";
document << "</html>\n";
}
// okay
void write_issues_email(const std::string& path,
const boost::posix_time::ptime& run_date,
const std::string& source,
const libraries_t& libraries)
{
boost::filesystem::ofstream document(path);
std::cout << "Writing document " << path << std::endl;
//utils::log(boost::format("Writing document %s") % path);
std::size_t failing_tests = 0;
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
failing_tests += count_failures(library.second);
}
document << "Boost regression test failures\n"
"------------------------------\n"
"Report time: " << run_date << "\n"
"\n"
"This report lists all regression test failures on release platforms.\n"
"\n"
"Detailed report: \n"
" http://beta.boost.org/development/tests/" << source << "/developer/issues.html\n"
"\n"
<< failing_tests << " failure" << (failing_tests == 1? "" : "s")
<< " in " << libraries.size() << " librar" << (libraries.size() == 1? "y" : "ies") << ":\n";
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
document << " " << library.first << " (" << count_failures(library.second) << ")\n";
}
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
std::string library_page = encode_path(library.first);
document << "\n"
"|" << library.first << "|\n";
BOOST_FOREACH(libraries_t::mapped_type::const_reference test_name, library.second) {
document << " " << test_name.first << ":";
BOOST_FOREACH(libraries_t::mapped_type::mapped_type::const_reference toolset, test_name.second) {
document << " " << toolset.first;
}
document << "\n";
}
}
}
}
void boost::regression::issues_list(const std::string& output_dir,
const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose)
{
::libraries_t libraries;
failing_tests(tests, explicit_markup, release, libraries);
std::string release_postfix_(release_postfix(release));
std::string issues_list("issues" + release_postfix_ + "_.html");
write_issues_list_reference_file(output_dir + "/issues.html", source, release, issues_list);
write_issues_list(output_dir + "/" + issues_list, explicit_markup, output_dir, libraries, source, run_date, warnings, purpose, release);
write_issues_email(output_dir + "/issues-email.txt",
run_date,
source,
libraries);
}

View File

@@ -1,31 +0,0 @@
// issues_page.hpp
//
// Copyright (c) 2010
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "xml.hpp"
#include <boost/filesystem/path.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <string>
#include <vector>
namespace boost {
namespace regression {
void issues_list(const std::string& output_dir,
const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose);
}
}
#endif

View File

@@ -1,369 +0,0 @@
// Copyright MetaCommunications, Inc. 2003-2006.
// Copyright Steven Watanabe 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "links_page.hpp"
#include "xml.hpp"
#include "common.hpp"
#include "html_writer.hpp"
#include "html.hpp"
#include <boost/date_time/posix_time/ptime.hpp>
#include <boost/filesystem/path.hpp>
#include <boost/foreach.hpp>
#include <string>
#include <vector>
using namespace boost::regression;
namespace {
void links_page(const failures_markup_t& explicit_markup,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp,
const std::string& library_name,
const std::string& toolset_name,
const std::string& test_name,
const std::vector<test_structure_t::test_log_t>& test_logs);
void write_variants_reference_file(const std::string& path,
const std::string& variants_file_path,
const std::string release_postfix,
const std::vector<test_structure_t::test_log_t>& test_logs,
const std::string& runner_id);
std::string output_page_header(node_ptr test_log, const std::string& runner_id);
void write_variants_file(const failures_markup_t& explicit_markup,
const std::string& path,
const std::vector<test_structure_t::test_log_t>& test_logs,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp);
void write_test_result_file(const failures_markup_t& explicit_markup,
const std::string& path,
const test_structure_t::test_log_t& test_log,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp);
void write_test_results_reference_file(const std::string& path,
const std::string& log_file_path,
const test_structure_t::test_log_t& test_log,
const std::string& runner_id);
// requires: revision must be an SVN revision, i.e. of the form nnnnn
void links_page(const failures_markup_t& explicit_markup,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp,
const std::string& library_name,
const std::string& toolset_name,
const std::string& test_name,
const std::vector<test_structure_t::test_log_t>& test_logs) {
//utils::log("Processing test \"" + runner_id + "/" + library_name + "/" + test_name + "/" + toolset_name + "\"");
const char* postfixes[] = {"", "_release"};
const char* dirs[] = { "developer", "user" };
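// Frameset reference pages are written for every (developer|user) x (""|"_release")
// combination; they all frame the same shared output written once below.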
if(test_logs.size() > 1) {
// utils::log(" Processing variants");
std::string variants_file_path = output_file_path(runner_id + "-" + library_name + "-" + toolset_name + "-" + test_name + "-variants");
write_variants_file(explicit_markup, variants_file_path, test_logs, runner_id, revision, timestamp);
BOOST_FOREACH(const std::string& release_postfix, postfixes) {
BOOST_FOREACH(const std::string& directory, dirs) {
std::string variants_reference_path = directory + "/" + (encode_path(runner_id + "-" + library_name + "-" + toolset_name + "-" + test_name + "-variants_" + release_postfix) + ".html");
write_variants_reference_file(variants_reference_path, "../" + variants_file_path, release_postfix, test_logs, runner_id);
}
}
}
BOOST_FOREACH(const test_structure_t::test_log_t& test_log, test_logs) {
//utils::log(" Processing test-log");
if(show_output(explicit_markup, test_log)) {
std::string log_path = log_file_path(explicit_markup, test_log, runner_id);
write_test_result_file(explicit_markup, log_path, test_log, runner_id, revision, timestamp);
BOOST_FOREACH(const std::string& release_postfix, postfixes) {
BOOST_FOREACH(const std::string& directory, dirs) {
std::string reference_file_path = directory + "/" + log_file_path(explicit_markup, test_log, runner_id, release_postfix);
write_test_results_reference_file(reference_file_path, log_path, test_log, runner_id);
}
}
}
}
}
// okay. result is unconstrained
std::string output_page_header(const test_structure_t::test_log_t& test_log, const std::string& runner_id) {
if(test_log.test_name != "") {
return runner_id + " - " + test_log.library + " - " + test_log.test_name + " / " + test_log.toolset;
} else {
return test_log.target_directory;
}
}
// requires: path must be a valid file path.
// requires: variants_file_path must be the path to the variants file relative to path
void write_variants_reference_file(const std::string& path,
const std::string& variants_file_path,
const std::string release_postfix,
const std::vector<test_structure_t::test_log_t>& test_logs,
const std::string& runner_id)
{
//utils::log(" Writing variants reference file %s" % path);
std::string component = output_page_header(test_logs[0], runner_id);
html_writer document(path);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Test output: " << escape_xml(component) << "</title>\n"
" </head>\n"
" <frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
" <frame name=\"tocframe\" src=\"toc" << release_postfix << ".html\" scrolling=\"auto\"/>\n"
" <frame name=\"docframe\" src=\"" << escape_uri(variants_file_path) << "\" scrolling=\"auto\"/>\n"
" </frameset>\n"
"</html>\n";
}
// requires revision is an SVN revision #
// requires path is a valid path
void write_variants_file(const failures_markup_t& explicit_markup,
const std::string& path,
const std::vector<test_structure_t::test_log_t>& test_logs,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp)
{
//utils::log(" Writing variants file " + path.string());
html_writer document(path);
std::string component = output_page_header(test_logs[0], runner_id);
int age = 0; // timestamp_difference(timestamp, run_date);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Test output: " << escape_xml(component) << "</title>\n"
" </head>\n"
" <body>\n"
" <div class=\"log-test-header\">\n"
" <div class=\"log-test-title\">\n"
" Test output: " << escape_xml(component) << "\n"
" </div>\n"
" <div><span class=\"timestamp-" << age << "\">\n"
" Rev " << revision << " /\n"
" " << format_timestamp(timestamp) << "\n"
" </span></div>\n"
" </div>\n"
"\n"
" <p>Output by test variants:</p>\n"
" <table>\n";
BOOST_FOREACH(const test_structure_t::test_log_t& log, test_logs) {
document << " <tr>\n"
" <td>\n";
std::string log_file = log_file_path(explicit_markup, log, runner_id);
if(!log_file.empty()) {
document << " <a href=\"../" << escape_uri(log_file) << "\">\n"
" " << escape_xml(log.target_directory) << "\n"
" </a>\n";
} else {
document << " " << escape_xml(log.target_directory) << "\n";
}
document << " </td>\n"
" </tr>\n";
}
document << " </table>\n"
" </body>\n"
"</html>\n";
}
// okay
const test_structure_t::target_t* lookup_target(const test_structure_t::test_log_t& test_log, const std::string& name) {
boost::unordered_map<std::string, test_structure_t::target_t>::const_iterator pos = test_log.targets.find(name);
if(pos != test_log.targets.end()) {
return &pos->second;
} else {
return 0;
}
}
// requires: path is a valid path
// requires: revision is an SVN revision
void write_test_result_file(const failures_markup_t& explicit_markup,
const std::string& path,
const test_structure_t::test_log_t& test_log,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp)
{
//utils::log(boost::format(" Writing log file document %s") % path);
html_writer document(path);
std::string component = output_page_header(test_log, runner_id);
int age = 0; // timestamp_difference(timestamp, run_date);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n"
"<html>\n";
document << " <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Test output: " << escape_xml(component) << "</title>\n"
" </head>\n";
document << " <body>\n"
" <div class=\"log-test-header\">\n"
" <div class=\"log-test-title\">\n"
" Test output: " << escape_xml(component) << "\n"
" </div>\n"
" <div><span class=\"timestamp-" << age << "\">\n"
" Rev " << revision << " /\n"
" " << format_timestamp(timestamp) << "\n"
" </span></div>\n"
" </div>\n";
if(!test_log.notes.empty()) {
document << " <div class=\"notes\">\n"
" <div class=\"notes-title\">Notes</div>\n";
show_notes(document, test_log.notes, explicit_markup);
document << " </div>\n";
}
if(const test_structure_t::target_t* compile = lookup_target(test_log, "compile")) {
const char* compile_result = compile->result? "succeed" : "fail";
document << " <div>\n";
document << " <div class=\"log-compiler-output-title\">Compile [" << escape_xml(compile->timestamp) << "]:"
" <span class=\"output-" << compile_result << "\">" << compile_result << "</span></div>\n";
document << " <pre>\n";
write_contents(document, compile->contents, true);
document << " </pre>\n";
document << " </div>\n";
}
if(const test_structure_t::target_t* link = lookup_target(test_log, "link")) {
const char* link_result = link->result? "succeed" : "fail";
document << " <div>\n";
document << " <div class=\"log-linker-output-title\">Link [" << escape_xml(link->timestamp) << "]:"
" <span class=\"output-" << link_result << "\">" << link_result << "</span></div>\n";
document << " <pre>\n";
write_contents(document, link->contents, true);
document << " </pre>\n";
document << " </div>\n";
}
if(const test_structure_t::target_t* lib = lookup_target(test_log, "lib")) {
const char* lib_result = lib->result? "succeed" : "fail";
std::string lib_name(lib->contents->value(), lib->contents->value_size());
document << " <div>\n";
document << " <div class=\"log-linker-output-title\">Lib [" << escape_xml(lib->timestamp) << "]:"
" <span class=\"output-" << lib_result << "\">" << lib_result << "</span></div>\n";
document << " <p>\n";
document << " See <a href=\"" << escape_uri(encode_path(runner_id + "-" + lib_name)) << ".html\">\n";
document << " " << escape_xml(lib_name) << "\n";
document << " </a>\n";
document << " </p>\n";
document << " </div>\n";
}
if(const test_structure_t::target_t* run = lookup_target(test_log, "run")) {
const char* run_result = run->result? "succeed" : "fail";
document << " <div>\n";
document << " <div class=\"log-linker-output-title\">Run [" << escape_xml(run->timestamp) << "]:"
" <span class=\"output-" << run_result << "\">" << run_result << "</span></div>\n";
document << " <pre>\n";
write_contents(document, run->contents, true);
document << " </pre>\n";
document << " </div>\n";
}
document << " </body>\n";
document << "</html>\n";
}
// requires path is a valid path
// requires: log_file_path is the location of the log file relative to path
void write_test_results_reference_file(const std::string& path,
const std::string& log_file_path,
const test_structure_t::test_log_t& test_log,
const std::string& runner_id)
{
std::string component = output_page_header(test_log, runner_id);
html_writer document(path);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Test output: " << escape_xml(component) << "</title>\n"
" </head>\n"
" <frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
" <frame name=\"tocframe\" src=\"../toc.html\" scrolling=\"auto\"/>\n"
" <frame name=\"docframe\" src=\"../../" << escape_uri(log_file_path) << "\" scrolling=\"auto\"/>\n"
" </frameset>\n"
"</html>\n";
}
}
// okay
void boost::regression::links_page(
const failures_markup_t& explicit_markup,
const test_structure_t::run_t& test_run)
{
BOOST_FOREACH(const test_structure_t::toolset_group_t::const_reference toolset, test_run.toolsets) {
BOOST_FOREACH(const test_structure_t::toolset_t::const_reference library, toolset.second) {
BOOST_FOREACH(const test_structure_t::library_t::const_reference test_case, library.second) {
::links_page(explicit_markup,
test_run.runner,
test_run.revision,
test_run.timestamp,
library.first,
toolset.first,
test_case.first,
test_case.second);
}
}
}
BOOST_FOREACH(const test_structure_t::toolset_group_t::const_reference toolset, test_run.non_test_case_targets) {
BOOST_FOREACH(const test_structure_t::toolset_t::const_reference library, toolset.second) {
BOOST_FOREACH(const test_structure_t::library_t::const_reference test_case, library.second) {
::links_page(explicit_markup,
test_run.runner,
test_run.revision,
test_run.timestamp,
library.first,
toolset.first,
test_case.first,
test_case.second);
}
}
}
}

View File

@@ -1,25 +0,0 @@
// links_page.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef LINKS_PAGE_HPP_INCLUDED
#define LINKS_PAGE_HPP_INCLUDED
#include "xml.hpp"
#include <boost/filesystem/path.hpp>
namespace boost {
namespace regression {
void links_page(
const failures_markup_t& explicit_markup,
const test_structure_t::run_t& test_run);
}
}
#endif

View File

@@ -1,28 +0,0 @@
// Copyright MetaCommunications, Inc. 2003-2005.
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "produce_expected_results.hpp"
#include "html_writer.hpp"
#include <iostream>
void boost::regression::produce_expected_results(const test_structure_t& tests) {
std::cout << "Warning: expected results not implemented" << std::endl;
return;
html_writer document("expected_results.xml");
document << "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n";
document << "<root>\n";
document << " <expected-failures>\n";
#if 0
foreach test-log
<xsl:if test="meta:is_test_log_a_test_case(.)">
<test-result library="{@library}" test-name="{@test-name}" toolset="{@toolset}" result="{@result}" />
</xsl:if>
#endif
document << " </expected-failures>\n";
document << "</root>\n";
}

View File

@@ -1,22 +0,0 @@
// result_page.cpp
//
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_REGRESSION_PRODUCE_EXPECTED_RESULTS_HPP
#define BOOST_REGRESSION_PRODUCE_EXPECTED_RESULTS_HPP
#include "xml.hpp"
namespace boost {
namespace regression {
void produce_expected_results(const test_structure_t& tests);
}
}
#endif

View File

@@ -1,525 +0,0 @@
// result_page.cpp
//
// Copyright MetaCommunications, Inc. 2003-2007.
// Copyright Steven Watanabe 2010-2011
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "result_page.hpp"
#include "common.hpp"
#include "html.hpp"
#include "html_writer.hpp"
#include "xml.hpp"
#include <boost/foreach.hpp>
#include <boost/unordered_map.hpp>
#include <set>
#include <map>
#include <string>
#include <vector>
#include <utility>
#include <iostream>
#include <fstream>
#include <stdexcept>
using namespace boost::regression;
namespace {
// safe: no assumptions, no unconstrained output
void test_type_col(html_writer& document, const std::string& test_type) {
document << "<td class=\"test-type\">\n";
document << " <a href=\"http://www.boost.org/status/compiler_status.html#Understanding\" class=\"legend-link\" target=\"_top\">";
if(test_type == "run_pyd") {
document << "r";
} else if(test_type == "run_mpi") {
document << "r";
} else if(test_type == "run") {
document << "r";
} else if(test_type == "run_fail") {
document << "rf";
} else if(test_type == "compile") {
document << "c";
} else if(test_type == "compile_fail") {
document << "cf";
} else if(test_type == "link") {
document << "l";
} else if(test_type == "link_fail") {
document << "lf";
} else {
throw std::runtime_error("Incorrect test type \"" + test_type + "\"");
}
document << " </a>\n";
document << "</td>\n";
}
// category/name
typedef std::pair<std::string, std::string> test_case_id_t;
// runner/toolset
typedef std::pair<std::string, std::string> test_toolset_id_t;
typedef std::vector<const test_structure_t::test_log_t*> test_log_group_t;
typedef boost::unordered_map<test_toolset_id_t, test_log_group_t> test_logs_by_run_t;
typedef std::map<test_case_id_t, test_logs_by_run_t> test_logs_t;
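// test_logs_t nests as: (test category, test name) -> (runner, toolset) -> matching test logs.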
// requires: result contains no HTML special characters
// requires: log_link must not contain a '/' derived from the input (This won't actually break anything, though)
void insert_cell_link(html_writer& document, const std::string& result, const std::string& log_link) {
if(log_link != "") {
document << "&#160;&#160;"
"<a href=\"" << escape_uri(log_link) << "\" class=\"log-link\" target=\"_top\">"
<< result <<
"</a>"
"&#160;&#160;";
} else {
document << "&#160;&#160;" << result << "&#160;&#160;";
}
}
// requires:
void insert_cell_developer(html_writer& document,
const failures_markup_t& explicit_markup,
bool release,
const std::string& library,
const std::string& test_name,
const std::string& runner,
const std::string& toolset,
const test_log_group_t& test_logs) {
std::string class_ = "library-" + result_cell_class(explicit_markup, library, toolset, test_logs);
std::string cell_link = (test_logs.size() > 1)?
encode_path(runner + "-" + library + "-" + toolset + "-" + test_logs.front()->test_name + "-variants_" + release_postfix(release)) + ".html" :
(test_logs.empty())?
std::string("") :
log_file_path(explicit_markup, *test_logs.front(), runner, release_postfix(release));
document << "<td class=\"" << class_ << "\" title=\"" << escape_xml(test_name) << "/" << escape_xml(toolset) << "\">\n";
if(is_unusable(explicit_markup, library, toolset)) {
insert_cell_link(document, "n/a", cell_link);
} else if(test_logs.empty()) {
document << "&#160;&#160;&#160;&#160;\n";
} else {
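// Precedence: an expected failure (result false, status set) renders as
// "fail?" when an expected_reason is recorded, otherwise "fail*"; any
// remaining unexpected failure renders as "fail"; else the cell is "pass".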
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->status) {
insert_cell_link(document, (log->expected_reason != "")? "fail?" : "fail*", cell_link);
goto done;
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && !log->status) {
insert_cell_link(document, "fail", cell_link);
goto done;
}
}
insert_cell_link(document, "pass", cell_link);
}
done:
document << "</td>\n";
}
// requires:
void insert_cell_user(html_writer& document,
const failures_markup_t& explicit_markup,
bool release,
const std::string& library,
const std::string& test_name,
const std::string& runner,
const std::string& toolset,
const test_log_group_t& test_logs) {
std::string class_ = "library-" + result_cell_class(explicit_markup, library, toolset, test_logs);
std::string cell_link = (test_logs.size() > 1)?
encode_path(runner + "-" + library + "-" + toolset + "-" + test_logs.front()->test_name + "-variants_" + release_postfix(release)) + ".html" :
(test_logs.empty())?
std::string("") :
log_file_path(explicit_markup, *test_logs.front(), runner, release_postfix(release));
document << "<td class=\"" << class_ << " user-" << class_ << "\" title=\"" << escape_xml(test_name) << "/" << escape_xml(toolset) << "\">\n";
if(is_unusable(explicit_markup, library, toolset)) {
insert_cell_link(document, "unusable", cell_link);
} else if(test_logs.empty()) {
document << "&#160;&#160;&#160;&#160;\n";
} else {
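// Same precedence as insert_cell_developer; only the "user-" CSS classes
// and the "unusable" label above differ.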
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->status) {
insert_cell_link(document, (log->expected_reason != "")? "fail?" : "fail*", cell_link);
goto done;
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && !log->status) {
insert_cell_link(document, "fail", cell_link);
goto done;
}
}
insert_cell_link(document, "pass", cell_link);
}
done:
document << "</td>\n";
}
// requires: line_mod should be from an enumerated set
// requires: source is a Git branch name
// requires: mode = developer | user
void insert_test_line(html_writer& document,
const failures_markup_t& explicit_markup,
bool release,
const std::string& library,
test_logs_t::const_reference test_results,
const std::vector<std::pair<std::string, std::string> >& all_toolsets,
const std::string& line_mod,
const std::string& source,
const std::string& mode) {
// This is guaranteed to exist because of the way the nested maps are created
const test_structure_t::test_log_t * first_log = (*test_results.second.begin()).second.front();
std::string test_program(first_log->test_program);
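// Strip everything up to and including the library name so the GitHub link
// below resolves relative to the library's own repository.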
std::string::size_type pos = test_program.find(library);
if (pos != std::string::npos)
test_program.erase(0, pos + library.size());
std::string test_header =
"<td class=\"test-name\">\n"
" <a href=\"https://github.com/boostorg/" + library + "/blob/" + source + escape_uri(test_program) + "\" class=\"test-link\" target=\"_top\">\n"
" " + escape_xml(test_results.first.second) + "\n" // FIXME: sanitize test name
" </a>\n"
"</td>\n";
document << "<tr class=\"library-row" << line_mod << "\">\n"
<< test_header;
test_log_group_t empty_test_log;
test_type_col(document, first_log->test_type);
BOOST_FOREACH(const test_toolset_id_t& run, all_toolsets) {
const std::string& toolset = run.second;
const std::string& runner = run.first;
test_logs_by_run_t::const_iterator pos = test_results.second.find(run);
const test_log_group_t* test_result_for_toolset =
(pos != test_results.second.end())?
&pos->second :
&empty_test_log;
if(mode == "user") {
insert_cell_user(document, explicit_markup, release, library, test_results.first.second, runner, toolset, *test_result_for_toolset);
} else {
insert_cell_developer(document, explicit_markup, release, library, test_results.first.second, runner, toolset, *test_result_for_toolset);
}
}
document << test_header
<< "</tr>\n";
}
// requires: source is a Git branch name
// requires: mode = developer | user
void insert_test_section(html_writer& document,
const test_structure_t& test_structure,
const failures_markup_t& explicit_markup,
bool release,
const std::string& library,
const test_logs_t& logs,
const std::vector<std::pair<std::string, std::string> >& all_toolsets,
const std::string& source,
const std::string& mode) {
std::size_t category_span = 3;
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
category_span += (run.toolsets.size());
}
}
for(test_logs_t::const_iterator pos = logs.begin(), end = logs.end(); pos != end; ++pos) {
std::string test_name = pos->first.second;
bool category_start = (pos == logs.begin()) || (pos->first.first != boost::prior(pos)->first.first);
bool category_end = (boost::next(pos) == end) || (pos->first.first != boost::next(pos)->first.first);
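// line_mod picks the row CSS suffix (-single/-first/-last) for a test's position within its category.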
std::string line_mod =
(category_start && category_end)? "-single" :
category_start? "-first" :
category_end? "-last" :
"";
if(category_start && pos->first.first != "0") {
document << "<tr>\n"
" <td class=\"library-test-category-header\" colspan=\"" << category_span << "\" align=\"center\">\n"
" " << escape_xml(pos->first.first) << "\n"
" </td>\n"
"</tr>\n";
}
insert_test_line(document, explicit_markup, release, library, *pos, all_toolsets, line_mod, source, mode);
}
}
}
// requires: mode = developer | user
// requires: source = Boost Git branch name
void boost::regression::result_page(const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& mode,
const boost::filesystem::path& comment_file)
{
// skip debug.xml
std::string index_path("index" + release_postfix(release) + "_.html");
{
std::cout << "Writing document " << "index" << release_postfix(release) << ".html" << std::endl;
html_writer index(mode + "/" + "index" + release_postfix(release) + ".html");
index << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Boost regression: " << source << "</title>\n"
"</head>\n"
"<frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
" <frame name=\"tocframe\" src=\"toc" << release_postfix(release) << ".html\" scrolling=\"auto\"/>\n"
" <frame name=\"docframe\" src=\"" << index_path << "\" scrolling=\"auto\"/>\n"
"</frameset>\n"
"</html>\n";
}
std::cout << "Writing document " << index_path << std::endl;
{
html_writer index(mode + "/" + index_path);
index << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Boost regression: " << source << "</title>\n"
"</head>\n"
"<body>\n"
"\n"
" <img border=\"0\" src=\"http://www.boost.org/boost.png\" width=\"277\" height=\"86\" align=\"right\" alt=\"Boost logo\"></img>\n"
"\n"
" <h1 class=\"page-title\">\n"
<< mode << " report: "
" <a class=\"hover-link\" href=\"summary.html\" target=\"_top\">" << source << "</a>\n"
" </h1>\n"
"\n";
std::string purpose = (mode == "user")?
"The purpose of this report is to help a user to find out whether a particular library "
"works on the particular compiler(s). For SVN \"health report\", see "
" <a href=\"../developer/index.html\" target=\"_top\">developer summary</a>."
:
"Provides Boost developers with visual indication of the SVN \"health\". For user-level "
"report, see <a href=\"../user/index.html\" target=\"_top\">user summary</a>.";
insert_report_header(index, run_date, warnings, purpose);
index << " <div class=\"comment\">\n";
if(comment_file != "") {
std::ifstream comment(comment_file.string().c_str());
if(!comment) {
throw std::ios_base::failure("Could not open file " + comment_file.string());
}
std::string comment_data(std::istreambuf_iterator<char>(comment.rdbuf()), std::istreambuf_iterator<char>());
index << comment_data;
}
index << " </div>\n";
index << "</body>\n";
index << "</html>\n";
}
std::set<std::string> libraries;
get_libraries(tests, libraries);
{
std::string toc_path("toc" + release_postfix(release) + ".html");
std::cout << "Writing document " << toc_path << std::endl;
html_writer toc(mode + "/" + toc_path);
toc << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
<< "<html>\n"
<< "<head>\n"
<< " <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
<< " <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\"/>\n"
<< " <title>Boost regression: " << source << "</title>\n"
<< "</head>\n"
<< "<body class=\"" << mode << "-toc\">\n"
<< " <div class=\"toc-header-entry\">\n"
<< " <a href=\"index" << release_postfix(release) << ".html\" class=\"toc-entry\" target=\"_top\">Report info</a>\n"
<< " </div>\n"
<< " <div class=\"toc-header-entry\">\n"
<< " <a href=\"summary" << release_postfix(release) << ".html\" class=\"toc-entry\" target=\"_top\">Summary</a>\n"
<< " </div>\n";
if(mode == "developer") {
toc << " <div class=\"toc-header-entry\">\n"
" <a href=\"issues.html\" class=\"toc-entry\" target=\"_top\">Unresolved issues</a>\n"
" </div>\n";
}
toc << " <div class=\"toc-header-entry\">\n";
insert_view_link(toc, "index", "toc-entry", release);
toc << " </div>\n";
toc << " <hr/>\n";
BOOST_FOREACH(const std::string& library, libraries) {
std::string library_page(encode_path(library));
toc << " <div class=\"toc-entry\">\n"
" <a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"toc-entry\" target=\"_top\">\n"
" " << escape_xml(library) << "\n"
" </a>\n"
" </div>\n";
}
toc << "</body>\n"
"</html>\n";
}
BOOST_FOREACH(const std::string& library, libraries) {
if(show_library(explicit_markup, library, release)) {
std::string library_results(encode_path(library) + release_postfix(release) + "_.html");
std::string library_page(encode_path(library) + release_postfix(release) + ".html");
std::cout << "Writing document " << library_page << std::endl;
{
html_writer document(mode + "/" + library_page);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\"/>\n"
" <title>Boost regression: " << escape_xml(library) << "/" << source << "</title>\n"
"</head>\n"
"<frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
"<frame name=\"tocframe\" src=\"toc" << release_postfix(release) << ".html\" scrolling=\"auto\"/>\n"
"<frame name=\"docframe\" src=\"" << escape_uri(library_results) << "\" scrolling=\"auto\"/>\n"
"</frameset>\n"
"</html>\n";
}
std::cout << "Writing document " << library_results << std::endl;
{
html_writer document(mode + "/" + library_results);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Boost regression: " << escape_xml(library) << "/" << source << "</title>\n"
"</head>\n"
"\n"
"<body>\n";
insert_page_links(document, encode_path(library), release, alternate_mode(mode));
document << "<h1 class=\"page-title\">\n"
" <a class=\"hover-link\" name=\"" << escape_xml(library) << "\" href=\"http://www.boost.org/libs/" << escape_uri(library) << "\" target=\"_top\">"
<< escape_xml(library) <<
"</a>"
"/"
"<a class=\"hover-link\" href=\"summary.html\" target=\"_top\">" << source << "</a>\n"
"</h1>\n";
insert_report_header(document, run_date, warnings);
// toolset/note/index
boost::unordered_map<std::string, std::size_t> library_marks;
std::vector<node_ptr> notes;
get_unusable(explicit_markup, library, tests, library_marks, notes);
document << "<table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" class=\"library-table\" width=\"1%\" summary=\"Library results\">\n"
" <thead>\n";
insert_runners_rows(document, "details", "top", tests, run_date); // okay
insert_toolsets_row(document, tests, explicit_markup, "details", run_date, library, library_marks);
document << " </thead>\n"
" <tfoot>\n";
insert_toolsets_row(document, tests, explicit_markup, "details", run_date, library, library_marks);
insert_runners_rows(document, "details", "bottom", tests, run_date);
document << " </tfoot>\n"
" <tbody>\n";
test_logs_t lib_tests;
std::vector<std::pair<std::string, std::string> > all_toolsets;
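// Collect this library's test logs keyed by (category, test name) and then by (runner, toolset);
// all_toolsets records every runner/toolset column in table order.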
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
all_toolsets.push_back(std::make_pair(run.runner, toolset.first));
test_structure_t::toolset_t::const_iterator pos = toolset.second.find(library);
if(pos != toolset.second.end()) {
BOOST_FOREACH(test_structure_t::library_t::const_reference test_case, pos->second) {
test_log_group_t test_logs;
BOOST_FOREACH(test_structure_t::test_case_t::const_reference log, test_case.second) {
if(is_test_log_a_test_case(log)) {
test_logs.push_back(&log);
}
}
if(!test_logs.empty()) {
std::string category = test_logs.front()->category;
lib_tests[std::make_pair(category, test_case.first)][std::make_pair(run.runner, toolset.first)] = test_logs;
}
}
}
}
}
}
insert_test_section(document, tests, explicit_markup, release, library, lib_tests, all_toolsets, source, mode);
document << " </tbody>\n"
"</table>\n";
if(!notes.empty()) {
document << "<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" class=\"library-library-notes\" summary=\"library notes\">\n";
for(std::size_t i = 0; i < notes.size(); ++i) {
document << "<tr class=\"library-library-note\">\n"
" <td valign=\"top\" width=\"3em\">\n"
" <a name=\"" << escape_uri(library) << "-note-" << (i + 1) << "\">\n"
" <span class=\"super\">" << (i + 1) << "</span>\n"
" </a>\n"
" </td>\n"
" <td>\n";
std::string refid;
lookup_attr(notes[i], "refid", refid);
show_note(document, notes[i], refid, explicit_markup);
document << " </td>\n"
"</tr>\n";
}
document << "</table>\n";
}
document << "<div id=\"legend\">\n"
<< (mode == "developer"? library_developer_legend : library_user_legend) << "\n"
"</div>\n";
insert_page_links(document, encode_path(library), release, alternate_mode(mode));
document << "</body>\n";
document << "</html>\n";
}
}
}
}


@@ -1,29 +0,0 @@
// result_page.hpp
//
// Copyright MetaCommunications, Inc. 2003-2007.
// Copyright Steven Watanabe 2010-2011
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include <boost/filesystem/path.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <string>
#include <vector>
#include "xml.hpp"
namespace boost {
namespace regression {
void result_page(const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& mode,
const boost::filesystem::path& comment_file);
}
}


@@ -1,57 +0,0 @@
// Copyright MetaCommunications, Inc. 2003-2004.
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "runners.hpp"
#include "html_writer.hpp"
#include "common.hpp"
#include <boost/foreach.hpp>
#include <iostream>
void boost::regression::runners(const test_structure_t& tests) {
{
html_writer document("runners.html");
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n"
"<html>\n"
" <head>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"master.css\" title=\"master\" />\n"
" <title>runners</title>\n"
" </head>\n"
" <body>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
document << " <table>"
"<tr>"
"<td>"
"<a href=\"" << escape_uri(encode_path(run.runner)) << ".html\">" << escape_xml(run.runner) << "</a>"
"</td>"
"</tr>"
"</table>\n";
}
}
document << " </body>\n"
"</html>\n";
}
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
std::cout << "Writing runner document " << encode_path(run.runner) << ".html" << std::endl;
html_writer document(encode_path(run.runner) + ".html");
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n"
"<html>\n"
"<head>\n"
"<title>" << escape_xml(run.runner) << "</title>\n"
"</head>\n"
"<body>\n"
"<h1>" << escape_xml(run.runner) << "</h1>\n"
"<hr></hr>"
<< run.comment // Intentionally not escaped--contains html formatting
<< "</body>\n"
"</html>\n";
}
}
}


@@ -1,22 +0,0 @@
// runners.hpp
//
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_REGRESSION_RUNNERS_HPP
#define BOOST_REGRESSION_RUNNERS_HPP
#include "xml.hpp"
namespace boost {
namespace regression {
void runners(const test_structure_t& tests);
}
}
#endif


@@ -1,260 +0,0 @@
// Copyright MetaCommunications, Inc. 2003-2004.
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include <boost/foreach.hpp>
#include <boost/next_prior.hpp>
#include "common.hpp"
#include "summary_page.hpp"
#include "html_writer.hpp"
#include "html.hpp"
#include <iostream>
#include <set>
using namespace boost::regression;
namespace {
// report developer status
// safe
void insert_cell_developer(html_writer& document,
const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_structure_t::library_t& current_cell,
bool release) {
std::string class_ = "summary-" + result_cell_class(explicit_markup, library, toolset, current_cell);
std::string library_page = encode_path(library);
document << "<td class=\"" << class_ << "\" title=\"" << escape_xml(library) << "/" << escape_xml(toolset) << "\">\n";
if(class_ == "summary-unusable") {
document << "&#160;&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"n/a"
"</a>"
"&#160;&#160;";
} else if(class_ == "summary-missing") {
document << "&#160;&#160;&#160;&#160;";
} else if(class_ == "summary-fail-unexpected") {
document << "<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"broken"
"</a>";
} else if(class_ == "summary-fail-unexpected-new") {
document << "&#160;&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"fail"
"</a>"
"&#160;&#160;";
} else {
document << "&#160;&#160;OK&#160;&#160;";
}
document << "</td>\n";
}
// report user status
// safe
void insert_cell_user(html_writer& document,
const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_structure_t::library_t& current_cell,
bool release) {
std::string class_ = "summary-" + result_cell_class(explicit_markup, library, toolset, current_cell);
std::string library_page = encode_path(library);
document << "<td class=\"" << class_ << " user-" << class_ << "\" title=\"" << escape_xml(library) << "/" << escape_xml(toolset) << "\">\n";
if(class_ == "summary-unusable") {
document << "&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"unusable"
"</a>"
"&#160;";
} else if(class_ == "summary-missing") {
document << "&#160;no&#160;results&#160;";
} else if(class_ == "summary-fail-unexpected") {
document << "&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"regress."
"</a>"
"&#160;";
} else if(class_ == "summary-fail-unexpected-new" ||
class_ == "summary-fail-expected" ||
class_ == "summary-unknown-status" ||
class_ == "summary-fail-unexpected-unresearched") {
document << "&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"details"
"</a>"
"&#160;";
} else {
document << "&#160;pass&#160;";
}
document << "</td>\n";
}
}
// requires: mode = developer | user
// requires: source is the name of an SVN branch
void boost::regression::summary_page(const std::string& mode,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const test_structure_t& tests,
const failures_markup_t & explicit_markup,
bool release) {
std::set<std::string> sorted_libraries;
get_libraries(tests, sorted_libraries);
std::string summary_results("summary" + release_postfix(release) + "_.html");
std::cout << "Writing document " << "summary" << release_postfix(release) << ".html" << std::endl;
{
html_writer document(mode + "/" + "summary" + release_postfix(release) + ".html");
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD html 4.01 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\"/>\n"
" <title>Boost regression summary: " << source << "</title>\n"
" </head>\n"
" <frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
" <frame name=\"tocframe\" src=\"toc" << release_postfix(release) << ".html\" scrolling=\"auto\"/>\n"
" <frame name=\"docframe\" src=\"" << summary_results << "\" scrolling=\"auto\"/>\n"
" </frameset>\n"
"</html>\n";
}
// Summary results
std::cout << "Writing document " << summary_results << std::endl;
{
html_writer document(mode + "/" + summary_results);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\"/>\n"
" <title>Boost regression summary: " << source << "</title>\n"
"</head>\n"
"<body>\n";
insert_page_links(document, "summary", release, alternate_mode(mode));
document << "<h1 class=\"page-title\">\n"
" Summary: \n"
" <a class=\"hover-link\" href=\"summary" << release_postfix(release) << ".html\" target=\"_top\">" << source << "</a>\n"
"</h1>\n";
insert_report_header(document, run_date, warnings);
std::size_t num_unusable = 0;
std::size_t num_regressions = 0;
std::size_t num_new_failures = 0;
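// Walk every test log to tally unusable configurations, regressions and new failures for the statistics line below.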
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
BOOST_FOREACH(test_structure_t::toolset_t::const_reference library, toolset.second) {
bool unusable = is_unusable(explicit_markup, library.first, toolset.first);
BOOST_FOREACH(test_structure_t::library_t::const_reference test_case, library.second) {
BOOST_FOREACH(test_structure_t::test_case_t::const_reference test_log, test_case.second) {
if(unusable) ++num_unusable;
else if(!test_log.result && !test_log.status) {
if(test_log.is_new) ++num_new_failures;
else ++num_regressions;
}
}
}
}
}
}
}
document << "<div class=\"statistics\">\n"
"Unusable: " << num_unusable << "\n"
"&#160;|&#160;\n"
"Regressions: " << num_regressions << "\n"
"&#160;|&#160;\n"
"New failures: " << num_new_failures << "\n"
"</div>\n";
// summary table
document << "<table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" width=\"1%\" class=\"summary-table\" summary=\"Overall summary\">\n";
document << "<thead>\n";
insert_runners_rows(document, "summary", "top", tests, run_date);
insert_toolsets_row(document, tests, explicit_markup, "summary", run_date);
document << "</thead>\n";
document << "<tfoot>\n";
insert_toolsets_row(document, tests, explicit_markup, "summary", run_date);
insert_runners_rows(document, "summary", "bottom", tests, run_date);
document << "</tfoot>\n";
document << "<tbody>\n";
BOOST_FOREACH(const std::string& library, sorted_libraries) {
std::string library_page = encode_path(library);
std::string library_header =
"<td class=\"library-name\">\n"
" <a href=\"" + escape_uri(library_page) + release_postfix(release) + ".html\" class=\"library-link\" target=\"_top\">\n"
" " + escape_xml(library) + "\n"
" </a>\n"
"</td>\n";
std::string line_mod;
if(sorted_libraries.size() == 1) line_mod = "-single";
else if(library == *sorted_libraries.begin()) line_mod = "-first";
else if(library == *boost::prior(sorted_libraries.end())) line_mod = "-last";
document << "<tr class=\"summary-row" << line_mod << "\">\n";
document << library_header;
test_structure_t::library_t empty_library;
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
test_structure_t::toolset_t::const_iterator pos = toolset.second.find(library);
const test_structure_t::library_t * current_cell =
(pos != toolset.second.end())?
&pos->second : &empty_library;
if(mode == "user") {
insert_cell_user(document, explicit_markup, library, toolset.first, *current_cell, release);
} else {
insert_cell_developer(document, explicit_markup, library, toolset.first, *current_cell, release);
}
}
}
}
document << library_header;
document << "</tr>\n";
}
document << "</tbody>\n";
document << "</table>\n";
document << "<div id=\"legend\">\n"
<< (mode == "developer"? summary_developer_legend : summary_user_legend) << "\n"
"</div>\n";
insert_page_links(document, "summary", release, alternate_mode(mode));
document << "</body>\n";
document << "</html>\n";
}
}


@@ -1,32 +0,0 @@
// summary_page.hpp
//
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_REGRESSION_SUMMARY_PAGE_HPP
#define BOOST_REGRESSION_SUMMARY_PAGE_HPP
#include <boost/filesystem/path.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <string>
#include <vector>
#include "xml.hpp"
namespace boost {
namespace regression {
void summary_page(const std::string& mode,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const test_structure_t& tests,
const failures_markup_t & explicit_markup,
bool release);
}
}
#endif


@@ -1,387 +0,0 @@
// xml.cpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "xml.hpp"
#include "common.hpp"
#include <boost/date_time/posix_time/time_parsers.hpp>
#include <boost/functional/hash.hpp>
#include <boost/throw_exception.hpp>
#include <algorithm>
#include <fstream>
#include <boost/format.hpp>
#include <boost/foreach.hpp>
using namespace boost::regression;
std::size_t boost::regression::hash_value(const test_case_t& test_case) {
std::size_t result = 0;
boost::hash_combine(result, test_case.test_name);
boost::hash_combine(result, test_case.library);
boost::hash_combine(result, test_case.toolset_name);
return result;
}
bool boost::regression::operator==(const test_case_t& lhs, const test_case_t& rhs) {
return lhs.test_name == rhs.test_name &&
lhs.library == rhs.library &&
lhs.toolset_name == rhs.toolset_name;
}
boost::regression::attr_ptr boost::regression::lookup_attr(node_ptr element, const std::string& name) {
if(element == 0) return 0;
return element->first_attribute(name.data(), name.size());
}
bool boost::regression::lookup_attr(node_ptr element, const std::string& name, std::string& result) {
if(element == 0) return false;
if(attr_ptr attr = lookup_attr(element, name)) {
result = std::string(attr->value(), attr->value_size());
return true;
} else {
return false;
}
}
void require_attr(node_ptr element, const std::string& name, std::string& result) {
if(!lookup_attr(element, name, result)) {
throw xml_error("Missing attribute " + name + " in element " + std::string(element->name(), element->name_size()));
}
}
bool boost::regression::check_attr(node_ptr element, const std::string& name, const std::string& expected) {
if(attr_ptr attr = lookup_attr(element, name)) {
return std::string(attr->value(), attr->value_size()) == expected;
} else {
return false;
}
}
bool boost::regression::check_name(node_ptr element, const std::string& name) {
return std::string(element->name(), element->name_size()) == name;
}
bool boost::regression::check_attr(node_ptr element,
const std::string& element1,
const std::string& attr,
const std::string& expected) {
if(element == 0) return false;
else if(element1 == "*") {
FOR_EACH_ELEMENT(nested, element) {
if(check_attr(nested, attr, expected)) {
return true;
}
}
return false;
} else {
return check_attr(lookup_element(element, element1), attr, expected);
}
}
boost::regression::node_ptr boost::regression::lookup_element(node_ptr element, const std::string& name) {
if(element == 0) {
return 0;
} else {
return element->first_node(name.data(), name.size());
}
}
int boost::regression::count_element(node_ptr element, const std::string& name) {
int result = 0;
element = element->first_node(name.data(), name.size());
while(element != 0) {
++result;
element = element->next_sibling(name.data(), name.size());
}
return result;
}
std::string boost::regression::value_of(node_ptr element) {
if(element && element->value() != 0) {
return std::string(element->value(), element->value_size());
} else {
return std::string();
}
}
void boost::regression::load_failures_markup(node_ptr root, failures_markup_t& failures_markup) {
if(check_name(root, "library")) {
std::string library;
lookup_attr(root, "name", library);
failures_markup.libraries.insert(std::make_pair(library, root));
} else if(check_name(root, "mark-toolset")) {
if(check_attr(root, "status", "required")) {
std::string name;
if(lookup_attr(root, "name", name)) {
failures_markup.required_toolsets.insert(name);
}
}
} else if(check_name(root, "note")) {
std::string refid;
if(lookup_attr(root, "id", refid)) {
failures_markup.notes.insert(std::make_pair(refid, root));
}
} else {
FOR_EACH_ELEMENT(elem, root) {
load_failures_markup(elem, failures_markup);
}
}
}
namespace {
void load_test_log(node_ptr root, test_structure_t::test_log_t& test_log) {
lookup_attr(root, "library", test_log.library);
lookup_attr(root, "test-program", test_log.test_program);
test_log.show_run_output = check_attr(root, "show-run-output", "true");
lookup_attr(root, "toolset", test_log.toolset);
lookup_attr(root, "test-type", test_log.test_type);
lookup_attr(root, "test-name", test_log.test_name);
lookup_attr(root, "target-directory", test_log.target_directory);
// these are set by add_expected_results
test_log.result = false; // check_attr(root, "result", "success");
test_log.expected_result = false; // check_attr(root, "expected-result", "success");
// lookup_attr(root, "expected-reason", test_log.expected_reason);
test_log.status = check_attr(root, "status", "expected");
test_log.is_new = check_attr(root, "is-new", "yes");
lookup_attr(root, "category", test_log.category);
// process compile/run/etc.
FOR_EACH_ELEMENT(elem, root) {
std::string name(elem->name(), elem->name_size());
if(name != "") {
test_structure_t::target_t& target = test_log.targets[name];
target.type = name;
lookup_attr(elem, "timestamp", target.timestamp);
target.result = !check_attr(elem, "result", "fail");
target.contents = elem;
}
}
}
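// Recursively walk the tree and bucket each test-log element into toolset -> library -> test name;
// logs that are not real test cases (per is_test_log_a_test_case) go into the separate non_test_case_targets map.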
void collect_toolsets(node_ptr root, test_structure_t::toolset_group_t& out, test_structure_t::toolset_group_t& non_test_case_targets) {
if(check_name(root, "test-log")) {
std::string toolset;
if(lookup_attr(root, "toolset", toolset)) {
std::string library, test_name;
lookup_attr(root, "library", library);
lookup_attr(root, "test-name", test_name);
test_structure_t::test_log_t log;
load_test_log(root, log);
if(is_test_log_a_test_case(log))
out[toolset][library][test_name].push_back(log);
else
non_test_case_targets[toolset][library][test_name].push_back(log);
}
} else {
FOR_EACH_ELEMENT(elem, root) {
collect_toolsets(elem, out, non_test_case_targets);
}
}
}
// FIXME: Make sure that Boost.DateTime handles parsing errors correctly
boost::posix_time::ptime parse_time(std::string arg) {
// fix up some formatting problems
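// drop a trailing 'Z' and turn the ISO 8601 'T' separator into a space so that
// time_from_string (which expects "YYYY-MM-DD hh:mm:ss") can parse the value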
if(!arg.empty() && arg[arg.size() - 1] == 'Z') arg.resize(arg.size() - 1);
std::replace(arg.begin(), arg.end(), 'T', ' ');
return boost::posix_time::time_from_string(arg);
}
void validate_run(const test_structure_t::run_t& run) {
if(run.run_type != "incremental" && run.run_type != "full") {
BOOST_THROW_EXCEPTION(xml_error("Expected run-type to be \"incremental\" or \"full\""));
}
// For Git, revision is a SHA, and thus may contain alpha characters
// BOOST_FOREACH(char ch, run.revision) {
// if(!('0' <= ch && ch <= '9')) {
// BOOST_THROW_EXCEPTION(xml_error("Expected revision to be a numeric constant"));
// }
// }
}
}
void boost::regression::load_test_structure(node_ptr root, test_structure_t& structure, std::vector<test_structure_t::run_t*>& runs) {
if(check_name(root, "test-run")) {
test_structure_t::run_t run;
std::string timestamp;
require_attr(root, "runner", run.runner);
require_attr(root, "platform", run.platform);
require_attr(root, "run-type", run.run_type);
require_attr(root, "source", run.source);
require_attr(root, "revision", run.revision);
require_attr(root, "timestamp", timestamp);
// "2010-05-11T18:29:17Z"
run.timestamp = parse_time(timestamp);
run.comment = value_of(lookup_element(root, "comment"));
validate_run(run);
collect_toolsets(root, run.toolsets, run.non_test_case_targets);
structure.platforms[run.platform].push_back(run);
runs.push_back(&structure.platforms[run.platform].back());
} else {
FOR_EACH_ELEMENT(elem, root) {
load_test_structure(elem, structure, runs);
}
}
}
namespace {
struct escaped {
const char* input;
std::size_t size;
bool trim;
};
// okay
void write_characters(html_writer& document, const char* input, std::size_t size) {
for(std::size_t i = 0; i < size; ++i) {
if(input[i] == '<') {
document << "&lt;";
} else if(input[i] == '>') {
document << "&gt;";
} else if(input[i] == '&') {
document << "&amp;";
} else {
document << input[i];
}
}
}
// FIXME: do not break in the middle of a code point
html_writer& operator<<(html_writer& document, const escaped& text) {
std::size_t max_size = 1 << 16;
if(text.trim && (text.size > max_size)) {
write_characters(document, text.input, max_size);
document << str(boost::format("...\n\n[The content has been trimmed by the report system because it exceeds %d bytes]") % max_size);
} else {
write_characters(document, text.input, text.size);
}
return document;
}
escaped make_escaped(const char* input, std::size_t size, bool trim) {
escaped result = { input, size, trim };
return result;
}
std::string escape_characters(const char* input, std::size_t size) {
std::string result;
for(std::size_t i = 0; i < size; ++i) {
if(input[i] == '<') {
result += "&lt;";
} else if(input[i] == '>') {
result += "&gt;";
} else if(input[i] == '&') {
result += "&amp;";
} else if(input[i] == '\'') {
result += "&apos;";
} else if(input[i] == '"') {
result += "&quot;";
} else {
result += input[i];
}
}
return result;
}
}
std::string boost::regression::escape_xml(const std::string& s) {
return escape_characters(s.data(), s.size());
}
void boost::regression::write_to_stream(html_writer& os, node_ptr node, bool trim) {
using namespace boost::property_tree::detail::rapidxml;
switch(node->type()) {
case node_document:
FOR_EACH_ELEMENT(elem, node) {
write_to_stream(os, elem);
}
break;
case node_element:
os << '<' << escape_characters(node->name(), node->name_size());
for(attr_ptr attr = node->first_attribute(); attr != 0; attr = attr->next_attribute()) {
os << ' ' << std::string(attr->name(), attr->name_size()) << '=' << '"' << escape_characters(attr->value(), attr->value_size()) << '"';
}
os << '>';
FOR_EACH_ELEMENT(elem, node) {
write_to_stream(os, elem);
}
os << '<' << '/' << escape_characters(node->name(), node->name_size()) << '>';
break;
case node_data:
os << make_escaped(node->value(), node->value_size(), trim);
break;
default:
throw xml_error("Don't know how to handle element type");
}
}
void boost::regression::write_contents(html_writer& os, node_ptr node, bool trim) {
FOR_EACH_ELEMENT(elem, node) {
write_to_stream(os, elem, trim);
}
}
namespace {
struct node_storage : document_type {
std::vector<char> storage;
};
}
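// Read the whole file into the document's own storage buffer and parse it in place with RapidXML;
// the returned nodes point into that buffer, so it must outlive any node_ptr obtained from the document.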
boost::shared_ptr<document_type> boost::regression::read_xml_file(const char* filename) {
std::ifstream input(filename);
if(!input) {
throw(std::ios_base::failure(std::string("Could not open file: ") + filename));
}
boost::shared_ptr<node_storage> result(new node_storage());
std::streambuf* buf = input.rdbuf();
std::streambuf::int_type ch;
while((ch = buf->sbumpc()) != std::char_traits<char>::eof()) {
result->storage.push_back(ch);
}
result->storage.push_back('\0');
result->parse<boost::property_tree::detail::rapidxml::parse_default>(&result->storage[0]);
return result;
}
namespace {
void load_expected_results(node_ptr root, test_case_t id, expected_results_t& expected_results) {
if(check_name(root, "test-result")) {
lookup_attr(root, "test-name", id.test_name);
bool result = !check_attr(root, "result", "fail");
expected_results.tests.insert(std::make_pair(id, result));
} else {
if(check_name(root, "toolset")) {
std::string name;
lookup_attr(root, "name", name);
id.toolset_name = name;
FOR_EACH_ELEMENT(elem, root) {
if(check_name(elem, "toolset-alias")) {
std::string alias_name;
if(lookup_attr(elem, "name", alias_name)) {
expected_results.toolset_aliases.insert(std::make_pair(alias_name, name));
}
}
}
} else if(check_name(root, "library")) {
lookup_attr(root, "name", id.library);
}
FOR_EACH_ELEMENT(elem, root) {
load_expected_results(elem, id, expected_results);
}
}
}
}
void boost::regression::load_expected_results(node_ptr root, expected_results_t& expected_results) {
test_case_t id;
::load_expected_results(root, id, expected_results);
}


@@ -1,133 +0,0 @@
// xml.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef XML_HPP_INCLUDED
#define XML_HPP_INCLUDED
#include <string>
#include <vector>
#include <iosfwd>
#include <map>
#include <boost/unordered_map.hpp>
#include <boost/unordered_set.hpp>
#include <boost/property_tree/detail/rapidxml.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <boost/variant.hpp>
#include <boost/shared_ptr.hpp>
#include "html_writer.hpp"
namespace boost {
namespace regression {
class xml_error : public std::exception {
public:
explicit xml_error(const std::string& m) : message(m) {}
virtual ~xml_error() throw() {}
virtual const char * what() const throw() { return message.c_str(); }
private:
std::string message;
};
typedef boost::property_tree::detail::rapidxml::xml_node<> node_type;
typedef boost::property_tree::detail::rapidxml::xml_attribute<> attr_type;
typedef boost::property_tree::detail::rapidxml::xml_document<> document_type;
typedef node_type* node_ptr;
typedef attr_type* attr_ptr;
typedef document_type* document_ptr;
struct test_case_t {
std::string toolset_name;
std::string library;
std::string test_name;
};
std::size_t hash_value(const test_case_t& test_case);
bool operator==(const test_case_t& lhs, const test_case_t& rhs);
struct expected_results_t {
typedef boost::unordered_map<test_case_t, bool> tests_t;
typedef boost::unordered_map<std::string, std::string> toolset_aliases_t;
tests_t tests;
toolset_aliases_t toolset_aliases;
};
void load_expected_results(node_ptr root, expected_results_t& expected_results);
struct test_structure_t {
struct target_t {
std::string type;
std::string timestamp;
bool result;
node_ptr contents;
};
typedef boost::variant<std::string, node_ptr> note_t;
struct test_log_t {
std::string library;
std::string test_program;
bool show_run_output;
std::string toolset;
std::string test_type;
std::string test_name;
std::string target_directory;
bool result;
bool expected_result;
std::string expected_reason;
bool status;
bool is_new;
std::string category;
boost::unordered_map<std::string, target_t> targets;
std::vector<note_t> notes;
};
typedef std::vector<test_log_t> test_case_t;
typedef std::map<std::string, test_case_t> library_t;
typedef std::map<std::string, library_t> toolset_t;
typedef std::map<std::string, toolset_t> toolset_group_t;
struct run_t {
std::string runner;
std::string platform;
std::string run_type;
std::string source;
std::string revision;
std::string comment;
boost::posix_time::ptime timestamp;
toolset_group_t toolsets;
toolset_group_t non_test_case_targets;
};
typedef std::vector<run_t> platform_t;
typedef std::map<std::string, platform_t> platform_group_t;
platform_group_t platforms;
};
void load_test_structure(node_ptr root, test_structure_t& structure, std::vector<test_structure_t::run_t*>& runs);
struct failures_markup_t {
boost::unordered_map<std::string, node_ptr> libraries;
boost::unordered_set<std::string> required_toolsets;
boost::unordered_map<std::string, node_ptr> notes;
};
void load_failures_markup(node_ptr root, failures_markup_t& failures_markup);
#define FOR_EACH_ELEMENT(name, node)\
for(::boost::regression::node_ptr name = (node)->first_node(); name != 0; name = name->next_sibling())
attr_ptr lookup_attr(node_ptr element, const std::string& name);
bool lookup_attr(node_ptr element, const std::string& name, std::string& result);
bool check_attr(node_ptr element, const std::string& name, const std::string& expected);
bool check_name(node_ptr element, const std::string& name);
bool check_attr(node_ptr element, const std::string& element1, const std::string& attr, const std::string& expected);
node_ptr lookup_element(node_ptr element, const std::string& name);
int count_element(node_ptr element, const std::string& name);
std::string value_of(node_ptr element);
std::string escape_xml(const std::string& s);
void write_to_stream(html_writer& os, node_ptr node, bool trim=false);
void write_contents(html_writer& document, node_ptr node, bool trim=false);
boost::shared_ptr<document_type> read_xml_file(const char* filename);
}
}
#endif


@@ -1,768 +0,0 @@
// zip.hpp
//
// Copyright (c) 2010, 2013
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_ZIP_ZIP_HPP_INCLUDED
#define BOOST_ZIP_ZIP_HPP_INCLUDED
#include <cassert>
#include <cstring>
#include <ostream>
#include <string>
#include <cstddef>
#include <vector>
#include <bitset>
#include <ios>
#include <boost/array.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <boost/date_time/posix_time/posix_time_types.hpp>
#include <boost/cstdint.hpp>
#include <boost/crc.hpp>
#include <boost/noncopyable.hpp>
#include <boost/iostreams/categories.hpp>
#include <boost/iostreams/operations.hpp>
#include <boost/iostreams/filter/zlib.hpp>
#include <boost/mpl/integral_c.hpp>
namespace boost {
namespace zip {
// TODO: Handle endian conversions
#define BOOST_ZIP_DEFINE_HEADER(name, type, offset) \
static const int name##_offset = (offset); \
static type get_##name(const char* header) { \
type result; \
::std::memcpy(&result, header + (offset), sizeof(type)); \
return result; \
} \
static void set_##name(char* header, type x) { \
::std::memcpy(header + (offset), &x, sizeof(type)); \
}
class zip_archive {
public:
zip_archive(std::ostream& file) : output_file(file), current_offset(0), num_files(0) {}
~zip_archive() {
close();
}
class file_handle;
friend class file_handle;
class file_handle {
public:
typedef char char_type;
struct category :
::boost::iostreams::sink_tag,
::boost::iostreams::closable_tag
{};
file_handle(zip_archive& archive,
const std::string& path,
boost::uint16_t creator_version,
boost::uint16_t minimum_required_version,
boost::uint16_t flags,
boost::uint16_t compression_method,
const boost::posix_time::ptime& modification_time)
{
self = 0;
archive.open_file(path, creator_version, minimum_required_version, flags, compression_method, modification_time, this);
}
file_handle(zip_archive& archive,
const std::string& path,
boost::uint16_t creator_version,
boost::uint16_t minimum_required_version,
boost::uint16_t flags,
boost::uint16_t compression_method)
{
self = 0;
archive.open_file(path.data(), path.size(), creator_version, minimum_required_version, flags, compression_method, 0, 0, this);
}
::std::streamsize write(const char* data, ::std::streamsize size) {
assert(self != 0);
self->output_file.write(data, size);
compressed_size += size;
self->current_offset += size;
return size;
}
void write_uncompressed(const char* data, ::std::streamsize size) {
assert(self != 0);
crc.process_bytes(data, static_cast<std::size_t>(size));
uncompressed_size += size;
}
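// Patch the now-known CRC-32 and sizes into the central directory entry, then seek back to the
// local file header (pos was saved at its crc32 field by open_file) and overwrite the three 32-bit fields there.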
void close() {
central_directory_entry::set_crc32(&self->central_directory[offset], crc.checksum());
// These lines cause a warning. Since the warning is legitimate,
// I'm leaving it.
central_directory_entry::set_compressed_size(&self->central_directory[offset], compressed_size);
central_directory_entry::set_uncompressed_size(&self->central_directory[offset], uncompressed_size);
boost::array<char, 12> buffer;
data_descriptor::set_crc32(&buffer[0], crc.checksum());
data_descriptor::set_compressed_size(&buffer[0], compressed_size);
data_descriptor::set_uncompressed_size(&buffer[0], uncompressed_size);
std::streamsize current_pos = self->output_file.tellp();
self->output_file.seekp(pos);
self->output_file.write(&buffer[0], 12);
self->output_file.seekp(current_pos);
self = 0;
}
private:
friend class zip_archive;
file_handle(const file_handle&);
file_handle& operator=(const file_handle&);
boost::crc_32_type crc;
std::streamsize pos;
std::size_t offset;
std::streamsize compressed_size;
std::streamsize uncompressed_size;
zip_archive* self;
};
void open_file(const std::string& path,
boost::uint16_t creator_version,
boost::uint16_t minimum_required_version,
boost::uint16_t flags,
boost::uint16_t compression_method,
const boost::posix_time::ptime& modification_time,
file_handle* out
)
{
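// Pack the timestamp into MS-DOS format: date = day | month << 5 | (year - 1980) << 9,
// time = seconds / 2 | minutes << 5 | hours << 11.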
boost::uint16_t date =
modification_time.date().day() +
(modification_time.date().month() << 5) +
((modification_time.date().year() - 1980) << 9);
boost::uint16_t time =
(modification_time.time_of_day().seconds() / 2) +
(modification_time.time_of_day().minutes() << 5) +
(modification_time.time_of_day().hours() << 11);
open_file(path.data(), path.size(), creator_version, minimum_required_version, flags, compression_method, time, date, out);
}
void open_file(const char* path, std::size_t path_size,
boost::uint16_t creator_version,
boost::uint16_t minimum_required_version,
boost::uint16_t flags,
boost::uint16_t compression_method,
boost::uint16_t modification_time,
boost::uint16_t modification_date,
file_handle* handle
)
{
// The file_handle should not be open
assert(handle->self == 0);
handle->pos = static_cast<std::streamsize>(output_file.tellp()) + local_file_header::crc32_offset;
std::vector<char> header(30);
local_file_header::set_signature(&header[0], local_file_header::signature);
local_file_header::set_minimum_required_version(&header[0], minimum_required_version);
local_file_header::set_flags(&header[0], flags);
local_file_header::set_compression_method(&header[0], compression_method);
local_file_header::set_filename_size(&header[0], path_size);
// TODO: handle Zip64
header.insert(header.end(), path, path + path_size);
output_file.write(&header[0], header.size());
std::size_t offset = central_directory.size();
central_directory.resize(offset + 46);
central_directory_entry::set_signature(&central_directory[offset], central_directory_entry::signature);
central_directory_entry::set_creator_version(&central_directory[offset], creator_version);
central_directory_entry::set_minimum_required_version(&central_directory[offset], minimum_required_version);
central_directory_entry::set_flags(&central_directory[offset], flags);
central_directory_entry::set_compression_method(&central_directory[offset], compression_method);
central_directory_entry::set_modification_time(&central_directory[offset], modification_time);
central_directory_entry::set_modification_date(&central_directory[offset], modification_date);
central_directory_entry::set_filename_size(&central_directory[offset], path_size);
central_directory_entry::set_extra_size(&central_directory[offset], 0);
central_directory_entry::set_comment_size(&central_directory[offset], 0);
central_directory_entry::set_file_start_disk(&central_directory[offset], 0);
central_directory_entry::set_internal_attributes(&central_directory[offset], 0);
central_directory_entry::set_external_attributes(&central_directory[offset], 0);
central_directory_entry::set_local_header_offset(&central_directory[offset], current_offset);
central_directory.insert(central_directory.end(), path, path + path_size);
handle->crc.reset();
handle->offset = offset;
handle->compressed_size = 0;
handle->uncompressed_size = 0;
handle->self = this;
current_offset += header.size();
++num_files;
}
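// Store an in-memory file uncompressed (method "none"): the CRC-32 and sizes are known up front,
// so they go straight into the local header and no later patch-up is needed.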
void write_file(const std::string& path, const char* contents, std::size_t size) {
std::vector<char> header(30);
local_file_header::set_signature(&header[0], local_file_header::signature);
local_file_header::set_minimum_required_version(&header[0], 10);
local_file_header::set_flags(&header[0], 0);
local_file_header::set_compression_method(&header[0], compression_method::none);
crc_32_type crc;
crc.process_bytes(contents, size);
local_file_header::set_crc32(&header[0], crc.checksum());
local_file_header::set_compressed_size(&header[0], size);
local_file_header::set_uncompressed_size(&header[0], size);
local_file_header::set_filename_size(&header[0], path.size());
// TODO: handle Zip64
header.insert(header.end(), path.begin(), path.end());
output_file.write(&header[0], header.size());
output_file.write(contents, size);
std::size_t offset = central_directory.size();
central_directory.resize(offset + 46);
central_directory_entry::set_signature(&central_directory[offset], central_directory_entry::signature);
central_directory_entry::set_creator_version(&central_directory[offset], 10);
central_directory_entry::set_minimum_required_version(&central_directory[offset], 10);
central_directory_entry::set_flags(&central_directory[offset], 0);
central_directory_entry::set_compression_method(&central_directory[offset], compression_method::none);
// FIXME: find correct date and time
central_directory_entry::set_modification_time(&central_directory[offset], 0);
central_directory_entry::set_modification_date(&central_directory[offset], 0);
central_directory_entry::set_crc32(&central_directory[offset], crc.checksum());
central_directory_entry::set_compressed_size(&central_directory[offset], size);
central_directory_entry::set_uncompressed_size(&central_directory[offset], size);
central_directory_entry::set_filename_size(&central_directory[offset], path.size());
central_directory_entry::set_extra_size(&central_directory[offset], 0);
central_directory_entry::set_comment_size(&central_directory[offset], 0);
central_directory_entry::set_file_start_disk(&central_directory[offset], 0);
central_directory_entry::set_internal_attributes(&central_directory[offset], 0);
central_directory_entry::set_external_attributes(&central_directory[offset], 0);
central_directory_entry::set_local_header_offset(&central_directory[offset], current_offset);
central_directory.insert(central_directory.end(), path.begin(), path.end());
current_offset = current_offset + header.size() + size;
++num_files;
}
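// Finish the archive: write the central directory followed by the end-of-central-directory record,
// emitting the Zip64 record and locator (with 0xFFFF placeholders in the classic record) once there
// are 65536 or more entries.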
void close() {
output_file.write(&central_directory[0], central_directory.size());
if(num_files >= 65536) {
boost::array<char, zip64_end_of_central_directory::size> data;
zip64_end_of_central_directory::set_signature(&data[0], zip64_end_of_central_directory::signature);
zip64_end_of_central_directory::set_size(&data[0], zip64_end_of_central_directory::size - 12);
zip64_end_of_central_directory::set_creator_version(&data[0], 45);
zip64_end_of_central_directory::set_minimum_required_version(&data[0], 45);
zip64_end_of_central_directory::set_disk_number(&data[0], 0);
zip64_end_of_central_directory::set_directory_start_disk(&data[0], 0);
zip64_end_of_central_directory::set_entries_on_disk(&data[0], num_files);
zip64_end_of_central_directory::set_total_entries(&data[0], num_files);
zip64_end_of_central_directory::set_directory_size(&data[0], central_directory.size());
zip64_end_of_central_directory::set_directory_offset(&data[0], current_offset);
output_file.write(&data[0], data.size());
boost::array<char, zip64_end_of_central_directory_locator::size> locator;
zip64_end_of_central_directory_locator::set_signature(&locator[0], zip64_end_of_central_directory_locator::signature);
zip64_end_of_central_directory_locator::set_end_of_directory_disk(&locator[0], 0);
zip64_end_of_central_directory_locator::set_end_of_directory_offset(&locator[0], current_offset + central_directory.size());
zip64_end_of_central_directory_locator::set_total_disks(&locator[0], 1);
output_file.write(&locator[0], locator.size());
std::vector<char> end(22);
end_of_central_directory::set_signature(&end[0], end_of_central_directory::signature);
end_of_central_directory::set_disk_number(&end[0], 0);
end_of_central_directory::set_directory_start_disk(&end[0], 0);
end_of_central_directory::set_entries_on_disk(&end[0], 0xFFFFu);
end_of_central_directory::set_total_entries(&end[0], 0xFFFFu);
end_of_central_directory::set_directory_size(&end[0], central_directory.size());
end_of_central_directory::set_directory_offset(&end[0], current_offset);
end_of_central_directory::set_comment_length(&end[0], 0);
output_file.write(&end[0], end.size());
} else {
std::vector<char> end(22);
end_of_central_directory::set_signature(&end[0], end_of_central_directory::signature);
end_of_central_directory::set_disk_number(&end[0], 0);
end_of_central_directory::set_directory_start_disk(&end[0], 0);
end_of_central_directory::set_entries_on_disk(&end[0], num_files);
end_of_central_directory::set_total_entries(&end[0], num_files);
end_of_central_directory::set_directory_size(&end[0], central_directory.size());
end_of_central_directory::set_directory_offset(&end[0], current_offset);
end_of_central_directory::set_comment_length(&end[0], 0);
output_file.write(&end[0], end.size());
}
}
private:
std::ostream& output_file;
std::vector<char> central_directory;
std::streamsize current_offset;
std::size_t num_files;
// little endian
struct local_file_header {
static const boost::uint32_t signature = 0x04034b50u;
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(minimum_required_version, boost::uint16_t, 4);
BOOST_ZIP_DEFINE_HEADER(flags, boost::uint16_t, 6);
BOOST_ZIP_DEFINE_HEADER(compression_method, boost::uint16_t, 8);
BOOST_ZIP_DEFINE_HEADER(modification_time, boost::uint16_t, 10);
BOOST_ZIP_DEFINE_HEADER(modification_date, boost::uint16_t, 12);
BOOST_ZIP_DEFINE_HEADER(crc32, boost::uint32_t, 14);
BOOST_ZIP_DEFINE_HEADER(compressed_size, boost::uint32_t, 18);
BOOST_ZIP_DEFINE_HEADER(uncompressed_size, boost::uint32_t, 22);
BOOST_ZIP_DEFINE_HEADER(filename_size, boost::uint16_t, 26);
BOOST_ZIP_DEFINE_HEADER(extra_size, boost::uint16_t, 28);
static char* filename(void* header) {
return static_cast<char*>(header) + 30;
}
static const char* filename(const void* header) {
return static_cast<const char*>(header) + 30;
}
};
struct data_descriptor {
// The signature may or may not be present
static const boost::uint32_t signature = 0x08074b50u;
BOOST_ZIP_DEFINE_HEADER(crc32, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(compressed_size, boost::uint32_t, 4);
BOOST_ZIP_DEFINE_HEADER(uncompressed_size, boost::uint32_t, 8);
// FIXME: handle skipping the signature automatically
};
// Not implemented Archive decryption header
// Not implemented Archive extra data record
struct central_directory_entry {
static const boost::uint32_t signature = 0x02014b50u;
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(creator_version, boost::uint16_t, 4);
BOOST_ZIP_DEFINE_HEADER(minimum_required_version, boost::uint16_t, 6);
BOOST_ZIP_DEFINE_HEADER(flags, boost::uint16_t, 8);
BOOST_ZIP_DEFINE_HEADER(compression_method, boost::uint16_t, 10);
BOOST_ZIP_DEFINE_HEADER(modification_time, boost::uint16_t, 12);
BOOST_ZIP_DEFINE_HEADER(modification_date, boost::uint16_t, 14);
BOOST_ZIP_DEFINE_HEADER(crc32, boost::uint32_t, 16);
BOOST_ZIP_DEFINE_HEADER(compressed_size, boost::uint32_t, 20);
BOOST_ZIP_DEFINE_HEADER(uncompressed_size, boost::uint32_t, 24);
BOOST_ZIP_DEFINE_HEADER(filename_size, boost::uint16_t, 28);
BOOST_ZIP_DEFINE_HEADER(extra_size, boost::uint16_t, 30);
BOOST_ZIP_DEFINE_HEADER(comment_size, boost::uint16_t, 32);
BOOST_ZIP_DEFINE_HEADER(file_start_disk, boost::uint16_t, 34);
BOOST_ZIP_DEFINE_HEADER(internal_attributes, boost::uint16_t, 36);
BOOST_ZIP_DEFINE_HEADER(external_attributes, boost::uint32_t, 38);
BOOST_ZIP_DEFINE_HEADER(local_header_offset, boost::uint32_t, 42);
// TODO: filename, extra, comment
};
struct digital_signature {
static const boost::uint32_t signature = 0x05054b50;
BOOST_ZIP_DEFINE_HEADER(data_size, boost::uint16_t, 4);
// TODO: data
};
struct zip64_end_of_central_directory {
static const boost::uint32_t signature = 0x06064b50u;
// The value stored into the "size of zip64 end of central
// directory record" should be the size of the remaining
// record and should not include the leading 12 bytes.
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(size, boost::uint64_t, 4);
BOOST_ZIP_DEFINE_HEADER(creator_version, boost::uint16_t, 12);
BOOST_ZIP_DEFINE_HEADER(minimum_required_version, boost::uint16_t, 14);
BOOST_ZIP_DEFINE_HEADER(disk_number, boost::uint32_t, 16);
BOOST_ZIP_DEFINE_HEADER(directory_start_disk, boost::uint32_t, 20);
BOOST_ZIP_DEFINE_HEADER(entries_on_disk, boost::uint64_t, 24);
BOOST_ZIP_DEFINE_HEADER(total_entries, boost::uint64_t, 32);
BOOST_ZIP_DEFINE_HEADER(directory_size, boost::uint64_t, 40);
BOOST_ZIP_DEFINE_HEADER(directory_offset, boost::uint64_t, 48);
static const size_t size = 56;
// TODO: data
// Header ID - 2 bytes
// Data Size - 4 bytes
};
struct zip64_end_of_central_directory_locator {
static const boost::uint32_t signature = 0x07064b50;
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(end_of_directory_disk, boost::uint32_t, 4);
BOOST_ZIP_DEFINE_HEADER(end_of_directory_offset, boost::uint64_t, 8);
BOOST_ZIP_DEFINE_HEADER(total_disks, boost::uint32_t, 16);
static const size_t size = 20;
};
struct end_of_central_directory {
static const uint32_t signature = 0x06054b50u;
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(disk_number, boost::uint16_t, 4);
BOOST_ZIP_DEFINE_HEADER(directory_start_disk, boost::uint16_t, 6);
BOOST_ZIP_DEFINE_HEADER(entries_on_disk, boost::uint16_t, 8);
BOOST_ZIP_DEFINE_HEADER(total_entries, boost::uint16_t, 10);
BOOST_ZIP_DEFINE_HEADER(directory_size, boost::uint32_t, 12);
BOOST_ZIP_DEFINE_HEADER(directory_offset, boost::uint32_t, 16);
BOOST_ZIP_DEFINE_HEADER(comment_length, boost::uint16_t, 20);
};
public:
struct version {
static const boost::uint16_t system_mask = 0xFF00u;
static const boost::uint16_t ms_dos = 0u << 8;
static const boost::uint16_t amiga = 1u << 8;
static const boost::uint16_t open_vms = 2u << 8;
static const boost::uint16_t unix_ = 3u << 8;
static const boost::uint16_t vm_cms = 4u << 8;
static const boost::uint16_t atari_st = 5u << 8;
static const boost::uint16_t os_2_hpfs = 6u << 8;
static const boost::uint16_t macintosh = 7u << 8;
static const boost::uint16_t z_system = 8u << 8;
static const boost::uint16_t cp_m = 9u << 8;
static const boost::uint16_t windows_ntfs = 10u << 8;
static const boost::uint16_t mvs = 11u << 8;
static const boost::uint16_t vse = 12u << 8;
static const boost::uint16_t acorn_risc = 13u << 8;
static const boost::uint16_t vfat = 14u << 8;
static const boost::uint16_t alternate_mvs = 15u << 8;
static const boost::uint16_t beos = 16u << 8;
static const boost::uint16_t tandem = 17u << 8;
static const boost::uint16_t os_400 = 18u << 8;
static const boost::uint16_t darwin = 19u << 8;
// e.g. 62 = ZIP 6.2
static const boost::uint16_t zip_version_mask = 0xFFu;
static const boost::uint16_t default_ = 10;
static const boost::uint16_t file_is_volume_label = 11;
static const boost::uint16_t file_is_folder = 20;
static const boost::uint16_t file_is_compressed_with_deflate = 20;
static const boost::uint16_t zip64 = 45;
// TODO: ...
};
struct flags {
static const boost::uint16_t encrypted = 0x1u;
static const boost::uint16_t imploding_8k_dictionary = 0x2u;
static const boost::uint16_t imploding_3_shannon_faro = 0x4u;
static const boost::uint16_t deflating_options_mask = 0x6u;
static const boost::uint16_t deflating_normal = 0x0u;
static const boost::uint16_t deflating_maximum = 0x2u;
static const boost::uint16_t deflating_fast = 0x4u;
static const boost::uint16_t deflating_super_fast = 0x6u;
static const boost::uint16_t lzma_eos = 0x2u;
static const boost::uint16_t has_data_descriptor = 0x8u;
static const boost::uint16_t enhanced_deflating = 0x10;
static const boost::uint16_t strong_encryption = 0x20;
static const boost::uint16_t utf8 = 0x800;
static const boost::uint16_t mask_local_header_data = 0x2000;
};
struct compression_method {
static const boost::uint16_t none = 0;
static const boost::uint16_t shrink = 1;
static const boost::uint16_t reduce_1 = 2;
static const boost::uint16_t reduce_2 = 3;
static const boost::uint16_t reduce_3 = 4;
static const boost::uint16_t reduce_4 = 5;
static const boost::uint16_t implode = 6;
static const boost::uint16_t tokenizing = 7;
static const boost::uint16_t deflate = 8;
static const boost::uint16_t deflate64 = 9;
static const boost::uint16_t pkware_dcli = 10;
static const boost::uint16_t bzip2 = 12;
static const boost::uint16_t lzma = 14;
static const boost::uint16_t ibm_terse = 18;
static const boost::uint16_t lz77 = 19;
static const boost::uint16_t wavpack = 97;
static const boost::uint16_t ppmd_i_1 = 98;
};
struct internal_attributes {
static const boost::uint16_t ascii = 0x1;
};
struct header_id {
static const boost::uint16_t zip64 = 0x0001;
static const boost::uint16_t av_info = 0x0007;
//static const boost::uint16_t extended_language_encoding = 0x0008;
static const boost::uint16_t os_2 = 0x0009;
static const boost::uint16_t ntfs = 0x000a;
static const boost::uint16_t open_vms = 0x000c;
static const boost::uint16_t unix_ = 0x000d;
//static const boost::uint16_t file_stream = 0x000e;
static const boost::uint16_t patch_descriptor = 0x000f;
static const boost::uint16_t x509_certificate = 0x0014;
static const boost::uint16_t x509_certificate_id_file = 0x0015;
static const boost::uint16_t x509_certificate_id_directory = 0x0016;
static const boost::uint16_t strong_encryption_header = 0x0017;
static const boost::uint16_t record_management_controls = 0x0018;
static const boost::uint16_t encryption_recipients = 0x0019;
static const boost::uint16_t ibm_uncompressed = 0x0065;
static const boost::uint16_t ibm_compressed = 0x0066;
static const boost::uint16_t poszip4690 = 0x4690;
// TODO: Third party mappings
};
private:
struct zip64_extended_information {
BOOST_ZIP_DEFINE_HEADER(tag, boost::uint16_t, 0);
BOOST_ZIP_DEFINE_HEADER(size, boost::uint16_t, 2);
BOOST_ZIP_DEFINE_HEADER(uncompressed_size, boost::uint64_t, 4);
BOOST_ZIP_DEFINE_HEADER(compressed_size, boost::uint64_t, 12);
BOOST_ZIP_DEFINE_HEADER(local_header_offset, boost::uint64_t, 20);
BOOST_ZIP_DEFINE_HEADER(disk_start_number, boost::uint32_t, 28);
};
};
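// Output filter implementing the legacy PKZIP "shrink" method (dynamic LZW):
// codes start at 9 bits and grow to 13, with the escape sequence (256,1) used
// to widen the code size and (256,2) to partially clear the dictionary.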
class shrink_filter : ::boost::noncopyable {
public:
typedef char char_type;
struct category :
::boost::iostreams::output_filter_tag,
::boost::iostreams::closable_tag
{};
shrink_filter()
{
memory = new lzw_node[1 << 13];
// no-throw from here on
code_size = 9;
for(int i = 0; i < (1 << code_size); ++i) {
initialize_node(i);
}
used_codes.set(256);
current_node = &root;
buf = 0;
pos = 0;
for(int i = 0; i < 256; ++i) {
root.children[i] = make_node(i);
}
next_code = 257;
}
~shrink_filter() {
delete[] memory;
}
template<class Sink>
bool put(Sink& sink, char ch) {
write_char(static_cast<unsigned char>(ch));
return do_write(sink);
}
template<class Sink>
void close(Sink& sink) {
if(current_node != &root) {
write_code(get_encoding(current_node));
current_node = &root;
}
do_write(sink);
if(pos != 0) {
::boost::iostreams::put(sink, buf & 0xFF);
pos = 0;
}
}
private:
template<class Sink>
bool do_write(Sink& sink) {
while(pos >= 8) {
if(!::boost::iostreams::put(sink, static_cast<char>(buf & 0xFF))) {
return false;
}
buf >>= 8;
pos -= 8;
}
return true;
}
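// One LZW step: follow the trie edge for ch if it exists; otherwise emit the
// code for the current match, allocate the next free code for (match, ch),
// and restart matching from the single-character node for ch.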
void write_char(unsigned char ch) {
if(current_node->children[ch] != 0) {
current_node = current_node->children[ch];
} else {
int encoding = get_encoding(current_node);
write_code(encoding);
for(;; ++next_code) {
if(next_code == (1 << code_size)) {
if(code_size == 13) {
write_code(256);
write_code(2);
free_leaves();
next_code = 257;
} else {
write_code(256);
write_code(1);
increment_code_size();
}
}
if(!used_codes.test(next_code)) {
current_node->children[ch] = make_node(next_code);
++next_code;
break;
}
}
current_node = root.children[ch];
}
}
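// Append code_size bits to the bit buffer, least-significant bits first;
// do_write() above drains the buffer one whole byte at a time.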
void write_code(int code) {
buf |= static_cast<boost::uint64_t>(code) << pos;
pos += code_size;
}
struct lzw_node {
lzw_node* children[256];
};
int get_encoding(lzw_node* node) const {
return node - memory;
}
bool free_leaves(lzw_node* node) {
bool result = true;
for(int i = 0; i < 256; ++i) {
if(node->children[i] != 0) {
result = false;
if(free_leaves(node->children[i])) {
destroy_node(node->children[i]);
node->children[i] = 0;
}
}
}
return result;
}
void increment_code_size() {
for(int i = (1 << code_size); i < (1 << (code_size + 1)); ++i) {
initialize_node(i);
}
++code_size;
}
void free_leaves() {
for(int i = 0; i < 256; ++i) {
free_leaves(root.children[i]);
}
}
void initialize_node(int encoding) {
lzw_node* result = memory + encoding;
for(int i = 0; i < 256; ++i) {
result->children[i] = 0;
}
}
lzw_node* make_node(int encoding = 0) {
assert(!used_codes.test(encoding));
lzw_node* result = memory + encoding;
assert(result >= memory);
assert(result < memory + (1 << code_size));
used_codes.set(encoding);
return result;
}
void destroy_node(lzw_node* node) {
used_codes.reset(get_encoding(node));
}
lzw_node* memory;
lzw_node root;
lzw_node* current_node;
int code_size;
int next_code;
::std::bitset<(1 << 13)> used_codes;
::boost::uint64_t buf;
int pos;
};
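// Raw deflate stream (no zlib header, no checksum), as stored for the ZIP
// "deflate" compression method.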
class deflate_filter : public ::boost::iostreams::zlib_compressor {
public:
deflate_filter() :
boost::iostreams::zlib_compressor(boost::iostreams::zlib_params(
boost::iostreams::zlib::default_compression,
boost::iostreams::zlib::deflated,
boost::iostreams::zlib::default_window_bits,
boost::iostreams::zlib::default_mem_level,
boost::iostreams::zlib::default_strategy,
true /* noheader */,
false /* crc */))
{}
};
class noop_filter
{
public:
typedef char char_type;
struct category :
::boost::iostreams::output_filter_tag,
::boost::iostreams::multichar_tag
{};
template<class Device>
std::streamsize write(Device& dev, const char * data, std::streamsize size) {
return boost::iostreams::write(dev, data, size);
}
};
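// Trait mapping each filter type to the corresponding ZIP compression-method
// value (see zip_archive::compression_method above).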
template<class Filter>
struct compression_method;
template<>
struct compression_method< ::boost::zip::noop_filter> :
::boost::mpl::integral_c<
::boost::uint16_t,
::boost::zip::zip_archive::compression_method::none
>
{};
template<>
struct compression_method< ::boost::zip::shrink_filter> :
::boost::mpl::integral_c<
::boost::uint16_t,
::boost::zip::zip_archive::compression_method::shrink
>
{};
template<>
struct compression_method< ::boost::zip::deflate_filter> :
::boost::mpl::integral_c<
::boost::uint16_t,
::boost::zip::zip_archive::compression_method::deflate
>
{};
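// Sink device that writes a single archive member: each chunk is handed to the
// archive via write_uncompressed() (presumably for uncompressed size/CRC
// bookkeeping) and is also pushed through the compression Filter into the
// member's file handle.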
template<class Filter>
class zip_member_sink {
public:
typedef char char_type;
struct category :
::boost::iostreams::sink_tag,
::boost::iostreams::closable_tag
{};
zip_member_sink(zip_archive& archive, const std::string& path)
: file(archive, path, 10, 10, 0,
compression_method<Filter>::value) {}
~zip_member_sink() {
close();
}
::std::streamsize write(const char* data, ::std::streamsize size) {
file.write_uncompressed(data, size);
::boost::iostreams::write(filter, file, data, size);
return size;
}
void close() {
::boost::iostreams::close(filter, file, ::std::ios_base::out);
::boost::iostreams::close(file);
}
private:
zip_archive::file_handle file;
Filter filter;
};
typedef zip_member_sink<shrink_filter> shrink_sink;
typedef zip_member_sink<deflate_filter> deflate_sink;
typedef zip_member_sink<noop_filter> nocompression_sink;
}
}
#endif
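For orientation, a minimal usage sketch of the sink typedefs declared above. This is a sketch only: it assumes a zip_archive instance constructed elsewhere (its constructor is not part of this excerpt), uses a hypothetical member name, and exercises only the zip_member_sink interface shown here.
// Minimal sketch, assuming an existing zip_archive and a hypothetical member name.
void add_notes_member(boost::zip::zip_archive& archive)
{
    // Opens a member stored with the "deflate" method (see compression_method<>).
    boost::zip::deflate_sink member(archive, "notes.txt");
    const char text[] = "example member contents\n";
    member.write(text, static_cast<std::streamsize>(sizeof(text) - 1));
    // The destructor calls close(), flushing the filter and the member's file handle.
}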

View File

@@ -1,71 +0,0 @@
#!/usr/bin/python
# Copyright Rene Rivera 2007-2013
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import os
import os.path
import shutil
import sys
import urllib
#~ Using --skip-script-download is useful to avoid repeated downloading of
#~ the regression scripts when doing the regression commands individually.
no_update_argument = "--skip-script-download"
no_update = no_update_argument in sys.argv
if no_update:
del sys.argv[sys.argv.index(no_update_argument)]
use_local_argument = '--use-local'
use_local = use_local_argument in sys.argv
if use_local:
del sys.argv[sys.argv.index(use_local_argument)]
#~ The directory this file is in.
if use_local:
root = os.path.abspath(os.path.realpath(os.path.curdir))
else:
root = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
print '# Running regressions in %s...' % root
script_sources = [ 'collect_and_upload_logs.py', 'process_jam_log.py', 'regression.py' ]
script_local = root
if use_local:
script_remote = 'file://'+os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
else:
script_remote = 'https://raw.githubusercontent.com/boostorg/regression/develop/src'
script_dir = os.path.join(root,'boost_regression_src')
if not no_update:
#~ Bootstrap.
#~ * Clear out any old versions of the scripts
print '# Creating regression scripts at %s...' % script_dir
if os.path.exists(script_dir):
shutil.rmtree(script_dir)
os.mkdir(script_dir)
#~ * Get new scripts, either from local working copy, or from remote
if use_local and os.path.exists(script_local):
print '# Copying regression scripts from %s...' % script_local
for src in script_sources:
shutil.copyfile( os.path.join(script_local,src), os.path.join(script_dir,src) )
else:
print '# Downloading regression scripts from %s...' % script_remote
proxy = None
for a in sys.argv[1:]:
if a.startswith('--proxy='):
proxy = {'https' : a.split('=')[1] }
print '--- %s' %(proxy['https'])
break
for src in script_sources:
urllib.FancyURLopener(proxy).retrieve(
'%s/%s' % (script_remote,src), os.path.join(script_dir,src) )
#~ * Make the scripts available to Python
sys.path.insert(0,os.path.join(root,'boost_regression_src'))
#~ Launch runner.
from regression import runner
runner(root)

View File

@@ -1,197 +0,0 @@
#!/bin/sh
#
# Copyright John Maddock
# Copyright Rene Rivera
#
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
#
# shell script for running the boost regression test suite and generating
# an html table of results.
# Set the following variables to configure the operation. Variables you
# usually need to set are listed first. Optional variables have reasonable
# defaults for most situations.
### THESE SHOULD BE CHANGED!
#
# "boost_root" points to the root of you boost installation:
# This can be either a non-exitent directory or an already complete Boost
# source tree.
#
boost_root="$HOME/CVSROOTs/Boost/boost_regression"
#
# Whether to fetch the most current Boost code from CVS (yes/no):
# This script can be used in two contexts: on an active Boost CVS tree, and
# on a fresh Boost CVS tree. If "yes" is specified here an attempt is made to
# fetch the latest CVS Boost files. For an active Boost CVS tree the existing
# CVS connection information is used. If an empty tree is detected the code
# is fetched with the anonymous read-only access information.
#
cvs_update=no
#
# "test_tools" are the Boost.Build toolsets to use for building and running the
# regression tests. Specify a space-separated list of Boost.Build toolsets.
# Each will be built and tested in sequence.
#
test_tools=gcc
#
# "toolset" is the Boost.Build toolset to use for building the helper programs.
# This is usually different from the toolsets being tested, and is normally
# the toolset that corresponds to the compiler built into your platform.
#
toolset=gcc
#
# "comment_path" is the path to an html-file describing the test environment.
# The content of this file will be embedded in the status pages being produced.
#
comment_path="$boost_root/../regression_comment.html"
#
# "test_dir" is the relative path to the directory to run the tests in,
# defaults to "status" and runs all the tests, but could be a sub-directory
# for example "libs/regex/test" to run the regex tests alone.
#
test_dir="status"
### DEFAULTS ARE OK FOR THESE.
#
# "exe_suffix" the suffix used by exectable files:
# In case your platform requires use of a special suffix for executables specify
# it here, including the "." if needed. This should not be needed even in Windows
# like platforms as they will execute without the suffix anyway.
#
exe_suffix=
#
# "bjam" points to your built bjam executable:
# The location of the binary for running bjam. The default should work
# under most circumstances.
#
bjam="$boost_root/tools/build/v2/engine/bin/bjam$exe_suffix"
#
# "process_jam_log", and "compiler_status" paths to built helper programs:
# The location of the executables of the regression help programs. These
# are built locally so the default should work in most situations.
#
process_jam_log="$boost_root/dist/bin/process_jam_log$exe_suffix"
compiler_status="$boost_root/dist/bin/compiler_status$exe_suffix"
#
# "boost_build_path" can point to additional locations to find toolset files.
#
boost_build_path="$HOME/.boost-build"
### NO MORE CONFIGURABLE PARTS.
#
# Some setup.
#
boost_dir=`basename "$boost_root"`
if test -n "${BOOST_BUILD_PATH}" ; then
BOOST_BUILD_PATH="$boost_build_path:$BOOST_BUILD_PATH"
else
BOOST_BUILD_PATH="$boost_build_path"
fi
export BOOST_BUILD_PATH
#
# STEP 0:
#
# Get the source code:
#
if test ! -d "$boost_root" ; then
mkdir -p "$boost_root"
if test $? -ne 0 ; then
echo "creation of $boost_root directory failed."
exit 256
fi
fi
if test $cvs_update = yes ; then
echo fetching Boost:
echo "/1 :pserver:anonymous@cvs.sourceforge.net:2401/cvsroot/boost A" >> "$HOME/.cvspass"
cat "$HOME/.cvspass" | sort | uniq > "$HOME/.cvspass"
cd `dirname "$boost_root"`
if test -f boost/CVS/Root ; then
cvs -z3 -d `cat "$boost_dir/CVS/Root"` co -d "$boost_dir" boost
else
cvs -z3 -d :pserver:anonymous@cvs.sourceforge.net:2401/cvsroot/boost co -d "$boost_dir" boost
fi
fi
#
# STEP 1:
# rebuild bjam if required:
#
echo building bjam:
cd "$boost_root/tools/build/v2/engine" && \
LOCATE_TARGET=bin sh ./build.sh
if test $? != 0 ; then
echo "bjam build failed."
exit 256
fi
#
# STEP 2:
# rebuild the regression test helper programs if required:
#
echo building regression test helper programs:
cd "$boost_root/tools/regression/build" && \
"$bjam" $toolset release
if test $? != 0 ; then
echo "helper program build failed."
exit 256
fi
#
# STEP 5:
# repeat steps 3 and 4 for each additional toolset:
#
for tool in $test_tools ; do
#
# STEP 3:
# run the regression tests:
#
echo running the $tool regression tests:
cd "$boost_root/$test_dir"
"$bjam" $tool --dump-tests 2>&1 | tee regress.log
#
# STEP 4:
# post process the results:
#
echo processing the regression test results for $tool:
cat regress.log | "$process_jam_log" --v2
if test $? != 0 ; then
echo "Failed regression log post processing."
exit 256
fi
done
#
# STEP 6:
# create the html table:
#
uname=`uname`
echo generating html tables:
"$compiler_status" --v2 --comment "$comment_path" "$boost_root" cs-$uname.html cs-$uname-links.html
if test $? != 0 ; then
echo "Failed HTML result table generation."
exit 256
fi
echo "done!"

View File

@@ -1,197 +0,0 @@
# smoke test - every so many minutes, check svn revision, and if changed:
# update working copy, run tests, upload results
# Copyright Beman Dawes 2007
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
# ---------------------------------------------------------------------------- #
import os
import sys
import platform
import time
import ftplib
# invoke the system command line processor
def cmd(command):
print "command:", command
os.system(command)
# update SVN working copy
def update_working_copy(boost_path):
os.chdir(boost_path)
cmd("svn update")
# get repository url
def repository_url(path, results_path):
url = ""
svn_info_file = results_path + "/svn_info.xml"
command = "svn info --xml " + path + " >" + svn_info_file
cmd(command)
f = open( svn_info_file, 'r' )
svn_info = f.read()
f.close()
i = svn_info.find('//svn.boost.org')
if i >= 0:
url = svn_info[i:svn_info.find("</url>")]
return url
# get revision number of a path, which may be a filesystem path or URL
def revision(path, results_path, test_name):
rev = 0
svn_info_file = results_path + "/" + test_name + "-svn_info.xml"
command = "svn info --xml " + path + " >" + svn_info_file
cmd(command)
f = open( svn_info_file, 'r' )
svn_info = f.read()
f.close()
i = svn_info.find( 'revision=' )
if i >= 0:
i += 10
while svn_info[i] >= '0' and svn_info[i] <= '9':
rev = rev*10 + int(svn_info[i])
i += 1
return rev
# run bjam in current directory
def bjam(boost_path, args, output_path, test_name):
# bjam seems to need BOOST_BUILD_PATH
#os.environ["BOOST_BUILD_PATH"]=boost_path + "/tools/build/v2"
print "Begin bjam..."
command = "bjam --v2 --dump-tests -l180"
if args != "": command += " " + args
command += " >" + output_path + "/" + test_name +"-bjam.log 2>&1"
cmd(command)
# run process_jam_log in current directory
def process_jam_log(boost_path, output_path, test_name):
print "Begin log processing..."
command = "process_jam_log " + boost_path + " <" +\
output_path + "/" + test_name +"-bjam.log"
cmd(command)
# run compiler_status in current directory
def compiler_status(boost_path, output_path, test_name):
print "Begin compiler status html creation... "
command = "compiler_status --v2 --ignore-pass --no-warn --locate-root " + boost_path + " " +\
boost_path + " " + output_path + "/" + test_name + "-results.html " +\
output_path + "/" + test_name + "-details.html "
cmd(command)
# upload results via ftp
def upload_to_ftp(results_path, test_name, ftp_url, user, psw, debug_level):
# to minimize the time web pages are not available, upload with temporary
# names and then rename to the permanent names
i = 0 # dummy; the "++i" in the except clauses below is a no-op placeholder statement
os.chdir(results_path)
tmp_results = "temp-" + test_name + "-results.html"
results = test_name + "-results.html"
tmp_details = "temp-" + test_name + "-details.html"
details = test_name + "-details.html"
print "Uploading results via ftp..."
ftp = ftplib.FTP( ftp_url, user, psw )
ftp.set_debuglevel( debug_level )
# ftp.cwd( site_path )
try: ftp.delete(tmp_results)
except: ++i
f = open( results, 'rb' )
ftp.storbinary( 'STOR %s' % tmp_results, f )
f.close()
try: ftp.delete(tmp_details)
except: ++i
f = open( details, 'rb' )
ftp.storbinary( 'STOR %s' % tmp_details, f )
f.close()
try: ftp.delete(results)
except: ++i
try: ftp.delete(details)
except: ++i
ftp.rename(tmp_results, results)
ftp.rename(tmp_details, details)
ftp.dir()
ftp.quit()
def commit_results(results_path, test_name, rev):
print "Commit results..."
cwd = os.getcwd()
os.chdir(results_path)
command = "svn commit --non-interactive -m "+'"'+str(rev)+'" '+test_name+"-results.html"
cmd(command)
os.chdir(cwd)
# ---------------------------------------------------------------------------- #
if len(sys.argv) < 8: # program name plus the 7 required arguments
print "Invoke with: minutes boost-path test-name results-path ftp-url user psw [bjam-args]"
print " boost-path must be path for a boost svn working directory."
print " results-path must be path for a svn working directory where an"
print " svn commit test-name+'-results.html' is valid."
print "Warning: This program hangs or crashes on network failures."
exit()
minutes = int(sys.argv[1])
boost_path = sys.argv[2]
test_name = sys.argv[3]
results_path = sys.argv[4]
ftp_url = sys.argv[5]
user = sys.argv[6]
psw = sys.argv[7]
if len(sys.argv) > 8: bjam_args = sys.argv[8]
else: bjam_args = ""
os.chdir(boost_path) # convert possible relative path
boost_path = os.getcwd() # to absolute path
print "minutes is ", minutes
print "boost_path is ", boost_path
print "test_name is ", test_name
print "results_path is ", results_path
print "ftp_url is ", ftp_url
print "user is ", user
print "psw is ", psw
print 'bjam args are "' + bjam_args + '"'
url = repository_url(boost_path, results_path)
print "respository url is ", url
first = 1
while 1:
working_rev = revision(boost_path, results_path, test_name)
repos_rev = revision("http:" + url, results_path, test_name)
print "Working copy revision: ", working_rev, " repository revision: ", repos_rev
if first or working_rev != repos_rev:
first = 0
start_time = time.time()
print
print "start at", time.strftime("%H:%M:%S", time.localtime())
update_working_copy(boost_path)
os.chdir(boost_path+"/status")
bjam(boost_path, bjam_args, results_path, test_name)
process_jam_log(boost_path, results_path, test_name)
compiler_status(boost_path, results_path, test_name)
upload_to_ftp(results_path, test_name, ftp_url, user, psw, 0)
commit_results(results_path, test_name,revision(boost_path, results_path, test_name))
elapsed_time = time.time() - start_time
print elapsed_time/60.0, "minutes elapsed time"
print
print "sleep ", minutes, "minutes..."
time.sleep(60 * minutes)

View File

@@ -1,32 +0,0 @@
# Copyright Misha Bergal 2006
#
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
# Test naming convention: the portion of the name before the hyphen ('-')
# identifies the bjam test type. The portion after the hyphen
# identifies the correct result to be reported by compiler_status.
test-suite testlib :
[ compile-fail comp-fail_fail.cpp ]
[ compile-fail comp-fail_pass.cpp ]
[ compile comp_fail.cpp ]
[ compile comp_pass.cpp ]
[ compile comp_warn.cpp ]
# The link test .cpp files were apparently never committed to the repository,
# and were lost.
# [ link link_fail.cpp ]
# [ link link_pass.cpp ]
# [ link-fail link-fail_fail.cpp ]
# [ link-fail link-fail_pass.cpp ]
[ run-fail run_comp-fail.cpp ]
[ run-fail run-fail_comp-fail.cpp ]
[ run-fail run-fail_fail-warn.cpp ]
[ run-fail run-fail_fail.cpp ]
[ run-fail run-fail_pass.cpp ]
[ run run_fail.cpp ]
[ run run_note.cpp ]
[ run run_pass.cpp ]
[ run run_warn-note.cpp ]
[ run run_warn.cpp ]
;

View File

@@ -1,10 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
int main() { return 0; }

View File

@@ -1,9 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#error example of a compile failure

View File

@@ -1,9 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#error example of a compile failure

View File

@@ -1,9 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
int main() { return 0; }

View File

@@ -1,18 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
// provoke one or more compiler warnings
int main(int argc, char * argv[] )
{
short s;
unsigned long ul;
s = s & ul; // warning from many compilers
if ( s == ul ) {} // warning from GCC
return 0;
}

View File

@@ -1,28 +0,0 @@
# Copyright (c) 2013 Steven Watanabe
#
# Distributed under the Boost Software Licence Version 1.0. (See
# accompanying file LICENCE_1_0.txt or copy at
# http://www.boost.org/LICENCE_1_0.txt)
path-constant expected-results : expected_results.xml ;
path-constant failures-markup : explicit-failures-markup.xml ;
path-constant css : ../../xsl_reports/xsl/v2/html/master.css ;
path-constant comment : comment.html ;
run ../../build//boost_report :
--expected=\"$(expected-results)\"
--markup=\"$(failures-markup)\"
-rl -rdd -rds -ri -rn
--css=\"$(css)\"
--tag=trunk
--run-date=\"2013-Mar-26 17:23:08\"
--comment=\"$(comment)\"
:
runner.xml
:
<dependency>runner.xml
<dependency>comment.html
<dependency>expected_results.xml
<dependency>explicit-failures-markup.xml
:
boost_report_test
;

View File

@@ -1,63 +0,0 @@
<table style="border-spacing: 0.5em;">
<tr>
<td style="vertical-align: top;"><tt>uname</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
Linux localhost.localdomain 3.8.3-203.fc18.x86_64 #1 SMP Mon Mar 18 12:59:28 UTC 2013 x86_64 x86_64 x86_64 GNU/Linux
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>uptime</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
06:29:47 up 3 min, 2 users, load average: 0.16, 0.17, 0.07
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>vmstat</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
procs -----------memory---------- ---swap-- -----io---- -system-- ----cpu----
r b swpd free buff cache si so bi bo in cs us sy id wa
0 0 0 1264828 41068 324004 0 0 901 43 274 457 4 6 89 1
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>xsltproc</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
Using libxml 20900, libxslt 10127 and libexslt 816
xsltproc was compiled against libxml 20900, libxslt 10127 and libexslt 816
libxslt 10127 was compiled against libxml 20900
libexslt 816 was compiled against libxml 20900
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>python</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
Python 2.7.3
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;">previous run</td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
Tue Mar 26 04:06:43 UTC 2013
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;">current run</td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
Tue Mar 26 13:29:47 UTC 2013
</pre>
</td>
</tr>
</table>

Binary file not shown.

View File

@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<expected-failures>
<!--
Copyright (c) 2013 Steven Watanabe
Distributed under the Boost Software Licence Version 1.0. (See
accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
-->
<toolset name="msvc-11.0">
<library name="any">
<test-result test-name="run_pass" result="success"/>
<test-result test-name="run_fail" result="fail"/>
<test-result test-name="run_fail_regress" result="success"/>
<test-result test-name="run_pass_unexpected" result="fail"/>
</library>
</toolset>
</expected-failures>

View File

@@ -1,51 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<explicit-failures-markup>
<!--
Copyright (c) 2013 Steven Watanabe
Distributed under the Boost Software Licence Version 1.0. (See
accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
-->
<!-- /////////////// Toolsets /////////////// -->
<mark-toolset name="msvc-11.0" status="required"/>
<!-- accumulators -->
<library name="any">
<mark-expected-failures>
<test name="run_fail_expected_new"/>
<toolset name="msvc*"/>
<note author="Steven Watanabe" refid="1"/>
</mark-expected-failures>
<mark-expected-failures>
<test name="run_fail_expected_new_nonote"/>
<toolset name="msvc*"/>
</mark-expected-failures>
<mark-expected-failures reason="?">
<test name="run_fail_expected_new_unsearched"/>
<toolset name="msvc*"/>
</mark-expected-failures>
<mark-expected-failures>
<test name="run_pass_unexpected_new"/>
<toolset name="msvc*"/>
<note author="Steven Watanabe" refid="1"/>
</mark-expected-failures>
<mark-expected-failures>
<test name="run_pass_unexpected_new_nonote"/>
<toolset name="msvc*"/>
</mark-expected-failures>
</library>
<library name="lambda">
<mark-unusable>
<toolset name="msvc-11.0"/>
<note author="Steven Watanabe">The library requires features that are not supported by this compiler</note>
</mark-unusable>
</library>
<note id="1">
The failure is caused by a compiler bug.
</note>
</explicit-failures-markup>

View File

@@ -1,132 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<test-run source="SVN" runner="runner1" timestamp="2013-03-26T14:23:59Z" platform="Windows" tag="trunk" run-type="full" revision="83542">
<!--
Copyright (c) 2013 Steven Watanabe
Distributed under the Boost Software Licence Version 1.0. (See
accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
-->
<comment>&lt;h1&gt;runner&lt;/h1&gt;
&lt;p&gt;Tests are run on the Windows platform&lt;/p&gt;</comment>
<test-log toolset="msvc-11.0" library="any" test-name="run_pass"
test-program="libs/any/test/run_pass.cpp"
target-directory="boost/bin.v2/libs/any/test/run_pass.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass</link>
<run result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass passed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_fail_expected_new"
test-program="libs/any/test/run_fail_expected_new.cpp"
target-directory="boost/bin.v2/libs/any/test/run_fail_expected_new.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new</link>
<run result="fail" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new failed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_fail"
test-program="libs/any/test/run_fail.cpp"
target-directory="boost/bin.v2/libs/any/test/run_fail.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail</link>
<run result="fail" timestamp="2013-03-26 14:47:12 UTC">run_fail failed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_fail_expected_new_nonote"
test-program="libs/any/test/run_fail_expected_new_nonote.cpp"
target-directory="boost/bin.v2/libs/any/test/run_fail_expected_new_nonote.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new_nonote.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new_nonote</link>
<run result="fail" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new_nonote failed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_fail_expected_new_unsearched"
test-program="libs/any/test/run_fail_expected_new_unsearched.cpp"
target-directory="boost/bin.v2/libs/any/test/run_fail_expected_new_unsearched.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new_unsearched.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new_unsearched</link>
<run result="fail" timestamp="2013-03-26 14:47:12 UTC">run_fail_expected_new_unsearched failed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_fail_new"
test-program="libs/any/test/run_fail_new.cpp"
target-directory="boost/bin.v2/libs/any/test/run_fail_new.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_new.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_new</link>
<run result="fail" timestamp="2013-03-26 14:47:12 UTC">run_fail_new failed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_fail_regress"
test-program="libs/any/test/run_fail_regress.cpp"
target-directory="boost/bin.v2/libs/any/test/run_fail_regress.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_regress.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_regress</link>
<run result="fail" timestamp="2013-03-26 14:47:12 UTC">run_fail_regress failed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_pass_unexpected_new"
test-program="libs/any/test/run_pass_unexpected_new.cpp"
target-directory="boost/bin.v2/libs/any/test/run_pass_unexpected_new.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected_new.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected_new</link>
<run result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected_new passed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_pass_unexpected"
test-program="libs/any/test/run_pass_unexpected.cpp"
target-directory="boost/bin.v2/libs/any/test/run_pass_unexpected.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected</link>
<run result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected passed</run>
</test-log>
<test-log toolset="msvc-11.0" library="any" test-name="run_pass_unexpected_new_nonote"
test-program="libs/any/test/run_pass_unexpected_new_nonote.cpp"
target-directory="boost/bin.v2/libs/any/test/run_pass_unexpected_new_nonote.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected_new_nonote.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected_new_nonote</link>
<run result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_unexpected_new_nonote passed</run>
</test-log>
<test-log toolset="msvc-11.0" library="variant" test-name="run_pass_both"
test-program="libs/variant/test/run_pass_both.cpp"
target-directory="boost/bin.v2/libs/variant/test/run_pass_both.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_both.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_both</link>
<run result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_both passed</run>
</test-log>
<test-log toolset="msvc-11.0" library="variant" test-name="run_pass_both"
test-program="libs/variant/test/run_pass_both.cpp"
target-directory="boost/bin.v2/libs/variant/test/run_pass_both.test/msvc-11.0/release"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_both.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_both</link>
<run result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_pass_both passed</run>
</test-log>
<test-log toolset="msvc-11.0" library="variant" test-name="run_fail_one"
test-program="libs/variant/test/run_fail_one.cpp"
target-directory="boost/bin.v2/libs/variant/test/run_fail_one.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_one.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_one</link>
<run result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_one passed</run>
</test-log>
<test-log toolset="msvc-11.0" library="variant" test-name="run_fail_one"
test-program="libs/variant/test/run_fail_one.cpp"
target-directory="boost/bin.v2/libs/variant/test/run_fail_one.test/msvc-11.0/release"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_one.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_one</link>
<run result="fail" timestamp="2013-03-26 14:47:12 UTC">run_fail_one failed</run>
</test-log>
<test-log toolset="msvc-11.0" library="lambda" test-name="unsupported"
test-program="libs/variant/test/unsupported.cpp"
target-directory="boost/bin.v2/libs/lambda/test/unsupported.test/msvc-11.0/debug"
test-type="run" show-run-output="false">
<compile result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_one.cpp</compile>
<link result="succeed" timestamp="2013-03-26 14:47:12 UTC">run_fail_one</link>
<run result="fail" timestamp="2013-03-26 14:47:12 UTC">unsupported failed</run>
</test-log>
</test-run>

View File

@@ -1,9 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#error example of a compile failure

View File

@@ -1,16 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
int main()
{
short s;
unsigned long ul;
s = s & ul; // warning from many compilers
if ( s == ul ) {} // warning from GCC
return 0;
}

View File

@@ -1,12 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
int main()
{
return 0;
}

View File

@@ -1,15 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#include <iostream>
int main()
{
std::cout << "example of output from a run-time failure\n";
return 1;
}

View File

@@ -1,20 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#include <iostream>
int main()
{
short s;
unsigned long ul;
s = s & ul; // warning from many compilers
if ( s == ul ) {} // warning from GCC
std::cout << "example of output from a run-time failure\n";
return 1;
}

View File

@@ -1,9 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#error example of a compile failure

View File

@@ -1,17 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#include <iostream>
int main()
{
std::cout << "example of output before a <note> line\n";
std::cout << "<note>\n";
std::cout << "example of output after a <note> line\n";
return 1;
}

View File

@@ -1,20 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#include <iostream>
int main()
{
short s;
unsigned long ul;
s = s & ul; // warning from many compilers
if ( s == ul ) {} // warning from GCC
std::cout << "example of output from a run-time failure\n";
return 1;
}

View File

@@ -1,14 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#include <iostream>
int main()
{
return 1;
}

View File

@@ -1,17 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#include <iostream>
int main()
{
std::cout << "example of output before a <note> line\n";
std::cout << "<note>\n";
std::cout << "example of output after a <note> line\n";
return 0;
}

View File

@@ -1,12 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
int main()
{
return 0;
}

View File

@@ -1,24 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
#include <iostream>
int main()
{
std::cout << "example of output before a <note> line\n";
std::cout << "<note>\n";
std::cout << "example of output after a <note> line\n";
// provoke a compiler warning to make sure <note> takes priority over
// a warning, but neither is lost from status reporting links HTML.
short s;
unsigned long ul;
s = s & ul; // warning from many compilers
if ( s == ul ) {} // warning from GCC
return 0;
}

View File

@@ -1,18 +0,0 @@
// (C) Copyright Beman Dawes 2003. Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Test naming convention: the portion of the name before the hyphen ('-')
// identifies the bjam test type. The portion after the hyphen
// identifies the correct result to be reported by compiler_status.
// provoke one or more compiler warnings
int main(int argc, char * argv[] )
{
short s;
unsigned long ul;
s = s & ul; // warning from many compilers
if ( s == ul ) {} // warning from GCC
return 0;
}

View File

@@ -1,36 +0,0 @@
boost-test(RUN) "statechart/DllTestNative" : "libs/statechart/test/TuTestMain.cpp"
boost-test(RUN) "statechart/DllTestNormal" : "libs/statechart/test/TuTestMain.cpp"
compile-c-c++ ..\..\..\bin.v2\libs\statechart\test\DllTestNormal.test\msvc-7.1\debug\threading-multi\TuTestMain.obj
TuTestMain.cpp
c:\Users\Misha\Stuff\boost\HEAD\boost\libs\statechart\test\TuTest.hpp(36) : warning C4275: non dll-interface class 'boost::statechart::event_base' used as base for dll-interface class 'boost::statechart::detail::rtti_policy::rtti_derived_type<MostDerived,Base>'
with
[
MostDerived=EvX,
Base=boost::statechart::event_base
]
..\..\..\boost\statechart\event_base.hpp(49) : see declaration of 'boost::statechart::event_base'
compile-c-c++ ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLibTuTest.obj
TuTest.cpp
c:\Users\Misha\Stuff\boost\HEAD\boost\libs\statechart\test\TuTest.hpp(36) : warning C4275: non dll-interface class 'boost::statechart::event_base' used as base for dll-interface class 'boost::statechart::detail::rtti_policy::rtti_derived_type<MostDerived,Base>'
with
[
MostDerived=EvX,
Base=boost::statechart::event_base
]
..\..\..\boost\statechart\event_base.hpp(49) : see declaration of 'boost::statechart::event_base'
msvc.link.dll ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.lib
Creating library ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.lib and object ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.exp
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
link /NOLOGO /INCREMENTAL:NO /DLL /DEBUG /subsystem:console /out:"..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll" /IMPLIB:"..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.lib" @"..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll.rsp"
if %errorlevel% 1 exit %errorlevel%
if exist "..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll.manifest" (
mt -nologo -manifest "..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll.manifest" "-outputresource:..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll;2"
)
...failed msvc.link.dll ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.lib...
...removing ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll
...removing ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.lib
...skipped <p..\..\..\bin.v2\libs\statechart\test\DllTestNormal.test\msvc-7.1\debug\threading-multi>DllTestNormal.exe for lack of <p..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi>DllTestNormalLib-vc71-mt-gd-1_35.lib...
...skipped <p..\..\..\bin.v2\libs\statechart\test\DllTestNormal.test\msvc-7.1\debug\threading-multi>DllTestNormal.run for lack of <p..\..\..\bin.v2\libs\statechart\test\DllTestNormal.test\msvc-7.1\debug\threading-multi>DllTestNormal.exe...

View File

@@ -1,27 +0,0 @@
<test-log library="statechart" test-name="DllTestNormal" test-type="run" test-program="libs/statechart/test/TuTestMain.cpp" target-directory="bin.v2/libs/statechart/test/DllTestNormal.test/msvc-7.1/debug/threading-multi" toolset="msvc-7.1" show-run-output="false">
<lib result="fail" timestamp="">../../bin.v2/libs/statechart/test/msvc-7.1/debug/threading-multi</lib>
</test-log>
<test-log library="statechart" test-name="" test-type="" test-program="" target-directory="bin.v2/libs/statechart/test/msvc-7.1/debug/threading-multi" toolset="" show-run-output="true">
<compile result="succeed" timestamp="">
TuTest.cpp
c:\Users\Misha\Stuff\boost\HEAD\boost\libs\statechart\test\TuTest.hpp(36) : warning C4275: non dll-interface class 'boost::statechart::event_base' used as base for dll-interface class 'boost::statechart::detail::rtti_policy::rtti_derived_type&lt;MostDerived,Base&gt;'
with
[
MostDerived=EvX,
Base=boost::statechart::event_base
]
..\..\..\boost\statechart\event_base.hpp(49) : see declaration of 'boost::statechart::event_base'
</compile>
<link result="fail" timestamp="">
Creating library ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.lib and object ..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.exp
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" &gt;nul
link /NOLOGO /INCREMENTAL:NO /DLL /DEBUG /subsystem:console /out:"..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll" /IMPLIB:"..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.lib" @"..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll.rsp"
if %errorlevel% 1 exit %errorlevel%
if exist "..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll.manifest" (
mt -nologo -manifest "..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll.manifest" "-outputresource:..\..\..\bin.v2\libs\statechart\test\msvc-7.1\debug\threading-multi\DllTestNormalLib-vc71-mt-gd-1_35.dll;2"
)
</link>
</test-log>

View File

@@ -1,325 +0,0 @@
locate-root "..\..\..\bin.v2"
C:\Users\Misha\Stuff\boost\HEAD\boost\tools\regression\test>C:\Users\Misha\Stuff\boost\HEAD\bin\..\boost\tools\jam\src\bin.ntx86\bjam.exe --dump-tests --v2 msvc-7.1 "-sBOOST_BUILD_PATH=C:\Users\Misha\Stuff\boost\HEAD\bin\.." "-sBOOST_ROOT="C:\Users\Misha\Stuff\boost\HEAD\bin\..\boost"
warning: Python location is not configured
warning: the Boost.Python library won't be built
Building Boost.Regex with the optional Unicode/ICU support disabled.
Please refer to the Boost.Regex documentation for more information
(and if you don't know what ICU is then you probably don't need it).
boost-test(RUN) "testlib/run~warn" : "tools/regression/test/run~warn.cpp"
boost-test(RUN) "testlib/run~warn-note" : "tools/regression/test/run~warn-note.cpp"
boost-test(RUN) "testlib/run~pass" : "tools/regression/test/run~pass.cpp"
boost-test(RUN) "testlib/run~note" : "tools/regression/test/run~note.cpp"
boost-test(RUN) "testlib/run~fail" : "tools/regression/test/run~fail.cpp"
boost-test(RUN_FAIL) "testlib/run-fail~pass" : "tools/regression/test/run-fail~pass.cpp"
boost-test(RUN_FAIL) "testlib/run-fail~fail" : "tools/regression/test/run-fail~fail.cpp"
boost-test(RUN_FAIL) "testlib/run-fail~fail-warn" : "tools/regression/test/run-fail~fail-warn.cpp"
boost-test(RUN_FAIL) "testlib/run-fail~compile-fail" : "tools/regression/test/run-fail~compile-fail.cpp"
boost-test(LINK_FAIL) "testlib/link-fail~pass" : "tools/regression/test/link-fail~pass.cpp"
boost-test(LINK_FAIL) "testlib/link-fail~fail" : "tools/regression/test/link-fail~fail.cpp"
boost-test(LINK) "testlib/link~pass" : "tools/regression/test/link~pass.cpp"
boost-test(LINK) "testlib/link~fail" : "tools/regression/test/link~fail.cpp"
boost-test(COMPILE) "testlib/compile~warn" : "tools/regression/test/compile~warn.cpp"
boost-test(COMPILE) "testlib/compile~pass" : "tools/regression/test/compile~pass.cpp"
boost-test(COMPILE) "testlib/compile~fail" : "tools/regression/test/compile~fail.cpp"
boost-test(COMPILE_FAIL) "testlib/compile-fail~pass" : "tools/regression/test/compile-fail~pass.cpp"
boost-test(COMPILE_FAIL) "testlib/compile-fail~fail" : "tools/regression/test/compile-fail~fail.cpp"
...found 210 targets...
...updating 157 targets...
MkDir1 ..\..\..\bin.v2\tools\regression\test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~fail.obj
compile-fail~fail.cpp
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
cl /Zm800 -nologo @"..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~fail.obj.rsp"
...failed compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~fail.obj...
...removing ..\..\..\bin.v2\tools\regression\test\compile-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~fail.obj
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~pass.obj
compile-fail~pass.cpp
compile-fail~pass.cpp(9) : fatal error C1189: #error : example of a compile failure
(failed-as-expected) ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~pass.obj
**passed** ..\..\..\bin.v2\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~fail.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static\threading-multi\compile~fail.obj
compile~fail.cpp
compile~fail.cpp(9) : fatal error C1189: #error : example of a compile failure
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
cl /Zm800 -nologo @"..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static\threading-multi\compile~fail.obj.rsp"
...failed compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static\threading-multi\compile~fail.obj...
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~pass.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~pass.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~pass.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile~pass.test\msvc-7.1\debug\link-static\threading-multi\compile~pass.obj
compile~pass.cpp
**passed** ..\..\..\bin.v2\tools\regression\test\compile~pass.test\msvc-7.1\debug\link-static\threading-multi\compile~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~warn.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~warn.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~warn.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~warn.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\compile~warn.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\compile~warn.test\msvc-7.1\debug\link-static\threading-multi\compile~warn.obj
compile~warn.cpp
compile~warn.cpp(15) : warning C4244: '=' : conversion from 'unsigned long' to 'short', possible loss of data
c:\users\misha\stuff\boost\head\boost\tools\regression\test\compile~warn.cpp(15) : warning C4700: local variable 'ul' used without having been initialized
**passed** ..\..\..\bin.v2\tools\regression\test\compile~warn.test\msvc-7.1\debug\link-static\threading-multi\compile~warn.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~fail.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.obj
link~fail.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe
link~fail.obj : error LNK2019: unresolved external symbol "int __cdecl f(void)" (?f@@YAHXZ) referenced in function _main
..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe : fatal error LNK1120: 1 unresolved externals
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
link /NOLOGO /INCREMENTAL:NO /DEBUG /subsystem:console /out:"..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe" @"..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe.rsp"
if errorlevel 1 exit %errorlevel%
if exist "..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe.manifest" (
mt -nologo -manifest "..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe.manifest" "-outputresource:..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe;1"
)
...failed msvc.link ..\..\..\bin.v2\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe...
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~pass.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~pass.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~pass.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\link~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\link~pass.test\msvc-7.1\debug\link-static\threading-multi\link~pass.obj
link~pass.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\link~pass.test\msvc-7.1\debug\link-static\threading-multi\link~pass.exe
**passed** ..\..\..\bin.v2\tools\regression\test\link~pass.test\msvc-7.1\debug\link-static\threading-multi\link~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.obj
link-fail~fail.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
link /NOLOGO /INCREMENTAL:NO /DEBUG /subsystem:console /out:"..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe" @"..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe.rsp"
if errorlevel 1 exit %errorlevel%
if exist "..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe.manifest" (
mt -nologo -manifest "..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe.manifest" "-outputresource:..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe;1"
)
...failed msvc.link ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe...
...removing ..\..\..\bin.v2\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\link-fail~pass.obj
link-fail~pass.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\link-fail~pass.exe
link-fail~pass.obj : error LNK2019: unresolved external symbol "int __cdecl f(void)" (?f@@YAHXZ) referenced in function _main
..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\link-fail~pass.exe : fatal error LNK1120: 1 unresolved externals
(failed-as-expected) ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\link-fail~pass.exe
**passed** ..\..\..\bin.v2\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\link-fail~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~compile-fail.obj
run-fail~compile-fail.cpp
run-fail~compile-fail.cpp(9) : fatal error C1189: #error : example of a compile failure
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
cl /Zm800 -nologo @"..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~compile-fail.obj.rsp"
...failed compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~compile-fail.obj...
...skipped <p..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi>run-fail~compile-fail.exe for lack of <p..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi>run-fail~compile-fail.obj...
...skipped <p..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi>run-fail~compile-fail.run for lack of <p..\..\..\bin.v2\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi>run-fail~compile-fail.exe...
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.obj
run-fail~fail-warn.cpp
run-fail~fail-warn.cpp(13) : warning C4244: '=' : conversion from 'unsigned long' to 'short', possible loss of data
c:\users\misha\stuff\boost\head\boost\tools\regression\test\run-fail~fail-warn.cpp(13) : warning C4700: local variable 'ul' used without having been initialized
msvc.link ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.run
1 file(s) copied.
..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.exe > ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.output 2>&1
set status=%ERRORLEVEL%
echo. >> ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.output
echo EXIT STATUS: %status% >> ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.output
if %status% EQU 0 (
copy ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.output ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.run
)
set verbose=0
if %status% NEQ 0 (
set verbose=1
)
if %verbose% EQU 1 (
echo ====== BEGIN OUTPUT ======
type ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.output
echo ====== END OUTPUT ======
)
exit %status%
...failed testing.capture-output ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.run...
...removing ..\..\..\bin.v2\tools\regression\test\run-fail~fail-warn.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail-warn.run
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.obj
run-fail~fail.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.run
1 file(s) copied.
..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.exe > ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.output 2>&1
set status=%ERRORLEVEL%
echo. >> ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.output
echo EXIT STATUS: %status% >> ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.output
if %status% EQU 0 (
copy ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.output ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.run
)
set verbose=0
if %status% NEQ 0 (
set verbose=1
)
if %verbose% EQU 1 (
echo ====== BEGIN OUTPUT ======
type ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.output
echo ====== END OUTPUT ======
)
exit %status%
...failed testing.capture-output ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.run...
...removing ..\..\..\bin.v2\tools\regression\test\run-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~fail.run
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\run-fail~pass.obj
run-fail~pass.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\run-fail~pass.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\run-fail~pass.run
====== BEGIN OUTPUT ======
example of output from a run-time failure
EXIT STATUS: 1
====== END OUTPUT ======
del /f /q "..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\run-fail~pass.exe"
...failed RmTemps ..\..\..\bin.v2\tools\regression\test\run-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\run-fail~pass.run...
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~fail.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug
...on 100th target...
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.obj
run~fail.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.run
====== BEGIN OUTPUT ======
EXIT STATUS: 1
====== END OUTPUT ======
..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.exe > ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.output 2>&1
set status=%ERRORLEVEL%
echo. >> ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.output
echo EXIT STATUS: %status% >> ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.output
if %status% EQU 0 (
copy ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.output ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.run
)
set verbose=0
if %status% NEQ 0 (
set verbose=1
)
if %verbose% EQU 1 (
echo ====== BEGIN OUTPUT ======
type ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.output
echo ====== END OUTPUT ======
)
exit %status%
...failed testing.capture-output ..\..\..\bin.v2\tools\regression\test\run~fail.test\msvc-7.1\debug\link-static\threading-multi\run~fail.run...
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~note.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~note.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~note.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~note.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~note.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~note.test\msvc-7.1\debug\link-static\threading-multi\run~note.obj
run~note.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\run~note.test\msvc-7.1\debug\link-static\threading-multi\run~note.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run~note.test\msvc-7.1\debug\link-static\threading-multi\run~note.run
1 file(s) copied.
**passed** ..\..\..\bin.v2\tools\regression\test\run~note.test\msvc-7.1\debug\link-static\threading-multi\run~note.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.obj
run~pass.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.run
1 file(s) copied.
**passed** ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn-note.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn-note.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn-note.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn-note.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn-note.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~warn-note.test\msvc-7.1\debug\link-static\threading-multi\run~warn-note.obj
run~warn-note.cpp
run~warn-note.cpp(21) : warning C4244: '=' : conversion from 'unsigned long' to 'short', possible loss of data
c:\users\misha\stuff\boost\head\boost\tools\regression\test\run~warn-note.cpp(21) : warning C4700: local variable 'ul' used without having been initialized
msvc.link ..\..\..\bin.v2\tools\regression\test\run~warn-note.test\msvc-7.1\debug\link-static\threading-multi\run~warn-note.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run~warn-note.test\msvc-7.1\debug\link-static\threading-multi\run~warn-note.run
1 file(s) copied.
**passed** ..\..\..\bin.v2\tools\regression\test\run~warn-note.test\msvc-7.1\debug\link-static\threading-multi\run~warn-note.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~warn.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~warn.test\msvc-7.1\debug\link-static\threading-multi\run~warn.obj
run~warn.cpp
run~warn.cpp(15) : warning C4244: '=' : conversion from 'unsigned long' to 'short', possible loss of data
c:\users\misha\stuff\boost\head\boost\tools\regression\test\run~warn.cpp(15) : warning C4700: local variable 'ul' used without having been initialized
msvc.link ..\..\..\bin.v2\tools\regression\test\run~warn.test\msvc-7.1\debug\link-static\threading-multi\run~warn.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run~warn.test\msvc-7.1\debug\link-static\threading-multi\run~warn.run
1 file(s) copied.
**passed** ..\..\..\bin.v2\tools\regression\test\run~warn.test\msvc-7.1\debug\link-static\threading-multi\run~warn.test
...failed updating 9 targets...
...skipped 17 targets...
...updated 131 targets...
View File
@@ -1,167 +0,0 @@
<test-log library="" test-name="compile-fail~fail" test-type="" test-program="" target-directory="tools/regression/test/compile-fail~fail.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="fail" timestamp="xxx">
compile-fail~fail.cpp
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" &gt;nul
cl /Zm800 -nologo @"C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\compile-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~fail.obj.rsp"
</compile>
</test-log>
<test-log library="" test-name="compile-fail~pass" test-type="" test-program="" target-directory="tools/regression/test/compile-fail~pass.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
compile-fail~pass.cpp
compile-fail~pass.cpp(9) : fatal error C1189: #error : example of a compile failure
(failed-as-expected) C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\compile-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\compile-fail~pass.obj
</compile>
</test-log>
<test-log library="" test-name="compile~fail" test-type="" test-program="" target-directory="tools/regression/test/compile~fail.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="fail" timestamp="xxx">
compile~fail.cpp
compile~fail.cpp(9) : fatal error C1189: #error : example of a compile failure
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" &gt;nul
cl /Zm800 -nologo @"C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\compile~fail.test\msvc-7.1\debug\link-static\threading-multi\compile~fail.obj.rsp"
</compile>
</test-log>
<test-log library="" test-name="compile~pass" test-type="" test-program="" target-directory="tools/regression/test/compile~pass.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
compile~pass.cpp
</compile>
</test-log>
<test-log library="" test-name="compile~warn" test-type="" test-program="" target-directory="tools/regression/test/compile~warn.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
compile~warn.cpp
compile~warn.cpp(15) : warning C4244: '=' : conversion from 'unsigned long' to 'short', possible loss of data
c:\users\misha\stuff\boost\head\boost\tools\regression\test\compile~warn.cpp(15) : warning C4700: local variable 'ul' used without having been initialized
</compile>
</test-log>
<test-log library="" test-name="link-fail~fail" test-type="" test-program="" target-directory="tools/regression/test/link-fail~fail.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
link-fail~fail.cpp
</compile>
<link result="fail" timestamp="xxx">
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" &gt;nul
link /NOLOGO /INCREMENTAL:NO /DEBUG /subsystem:console /out:"C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe" @"C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe.rsp"
if errorlevel 1 exit %errorlevel%
if exist "C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe.manifest" (
mt -nologo -manifest "C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe.manifest" "-outputresource:C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link-fail~fail.test\msvc-7.1\debug\link-static\threading-multi\link-fail~fail.exe;1"
)
</link>
</test-log>
<test-log library="" test-name="link-fail~pass" test-type="" test-program="" target-directory="tools/regression/test/link-fail~pass.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
link-fail~pass.cpp
</compile>
<link result="succeed" timestamp="xxx">
link-fail~pass.obj : error LNK2019: unresolved external symbol "int __cdecl f(void)" (?f@@YAHXZ) referenced in function _main
C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\link-fail~pass.exe : fatal error LNK1120: 1 unresolved externals
(failed-as-expected) C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link-fail~pass.test\msvc-7.1\debug\link-static\threading-multi\link-fail~pass.exe
</link>
</test-log>
<test-log library="" test-name="link~fail" test-type="" test-program="" target-directory="tools/regression/test/link~fail.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
link~fail.cpp
</compile>
<link result="fail" timestamp="xxx">
link~fail.obj : error LNK2019: unresolved external symbol "int __cdecl f(void)" (?f@@YAHXZ) referenced in function _main
C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe : fatal error LNK1120: 1 unresolved externals
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" &gt;nul
link /NOLOGO /INCREMENTAL:NO /DEBUG /subsystem:console /out:"C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe" @"C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe.rsp"
if errorlevel 1 exit %errorlevel%
if exist "C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe.manifest" (
mt -nologo -manifest "C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe.manifest" "-outputresource:C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\link~fail.test\msvc-7.1\debug\link-static\threading-multi\link~fail.exe;1"
)
</link>
</test-log>
<test-log library="" test-name="link~pass" test-type="" test-program="" target-directory="tools/regression/test/link~pass.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
link~pass.cpp
</compile>
<link result="succeed" timestamp="xxx">
</link>
</test-log>
<test-log library="" test-name="run-fail~compile-fail" test-type="" test-program="" target-directory="tools/regression/test/run-fail~compile-fail.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="fail" timestamp="xxx">
run-fail~compile-fail.cpp
run-fail~compile-fail.cpp(9) : fatal error C1189: #error : example of a compile failure
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" &gt;nul
cl /Zm800 -nologo @"C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\general\actual\tools\regression\test\run-fail~compile-fail.test\msvc-7.1\debug\link-static\threading-multi\run-fail~compile-fail.obj.rsp"
</compile>
</test-log>
<test-log library="" test-name="run-fail~fail-warn" test-type="" test-program="" target-directory="tools/regression/test/run-fail~fail-warn.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
run-fail~fail-warn.cpp
run-fail~fail-warn.cpp(13) : warning C4244: '=' : conversion from 'unsigned long' to 'short', possible loss of data
c:\users\misha\stuff\boost\head\boost\tools\regression\test\run-fail~fail-warn.cpp(13) : warning C4700: local variable 'ul' used without having been initialized
</compile>
<link result="succeed" timestamp="xxx"></link>
<run result="fail" timestamp="xxx">
</run>
</test-log>
<test-log library="" test-name="run-fail~fail" test-type="" test-program="" target-directory="tools/regression/test/run-fail~fail.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
run-fail~fail.cpp
</compile>
<link result="succeed" timestamp="xxx"></link>
<run result="fail" timestamp="xxx">
</run>
</test-log>
<test-log library="" test-name="run-fail~pass" test-type="" test-program="" target-directory="tools/regression/test/run-fail~pass.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
run-fail~pass.cpp
</compile>
<link result="succeed" timestamp="xxx"></link>
<run result="succeed" timestamp="xxx">
</run>
</test-log>
<test-log library="" test-name="run~fail" test-type="" test-program="" target-directory="tools/regression/test/run~fail.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
run~fail.cpp
</compile>
<link result="succeed" timestamp="xxx"></link>
<run result="fail" timestamp="xxx">
</run>
</test-log>
<test-log library="" test-name="run~note" test-type="" test-program="" target-directory="tools/regression/test/run~note.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
run~note.cpp
</compile>
<link result="succeed" timestamp="xxx"></link>
<run result="succeed" timestamp="xxx">
</run>
</test-log>
<test-log library="" test-name="run~pass" test-type="" test-program="" target-directory="tools/regression/test/run~pass.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
run~pass.cpp
</compile>
<link result="succeed" timestamp="xxx"></link>
<run result="succeed" timestamp="xxx">
</run>
</test-log>
<test-log library="" test-name="run~warn-note" test-type="" test-program="" target-directory="tools/regression/test/run~warn-note.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
run~warn-note.cpp
run~warn-note.cpp(21) : warning C4244: '=' : conversion from 'unsigned long' to 'short', possible loss of data
c:\users\misha\stuff\boost\head\boost\tools\regression\test\run~warn-note.cpp(21) : warning C4700: local variable 'ul' used without having been initialized
</compile>
<link result="succeed" timestamp="xxx"></link>
<run result="succeed" timestamp="xxx">
</run>
</test-log>
<test-log library="" test-name="run~warn" test-type="" test-program="" target-directory="tools/regression/test/run~warn.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="succeed" timestamp="xxx">
run~warn.cpp
run~warn.cpp(15) : warning C4244: '=' : conversion from 'unsigned long' to 'short', possible loss of data
c:\users\misha\stuff\boost\head\boost\tools\regression\test\run~warn.cpp(15) : warning C4700: local variable 'ul' used without having been initialized
</compile>
<link result="succeed" timestamp="xxx"></link>
<run result="succeed" timestamp="xxx">
</run>
</test-log>
View File
@@ -1,33 +0,0 @@
locate-root "..\..\..\bin.v2"
C:\Users\Misha\Stuff\boost\HEAD\boost\tools\regression\test>C:\Users\Misha\Stuff\boost\HEAD\bin\..\boost\tools\jam\src\bin.ntx86\bjam.exe --dump-tests --v2 msvc-7.1 "-sBOOST_BUILD_PATH=C:\Users\Misha\Stuff\boost\HEAD\bin\.." "-sBOOST_ROOT="C:\Users\Misha\Stuff\boost\HEAD\bin\..\boost"
boost-test(RUN) "testlib/run~pass" : "tools/regression/test/run~pass.cpp"
boost-test(RUN) "testlib/run~pass" : "tools/regression/test/run~pass2s.cpp"
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.obj
run~pass.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.run
1 file(s) copied.
**passed** ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.obj
run~pass2.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe
testing.capture-output ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.run
1 file(s) copied.
**passed** ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.test
...failed updating 9 targets...
...skipped 17 targets...
...updated 131 targets...
View File
@@ -1,38 +0,0 @@
locate-root "..\..\..\bin.v2"
C:\Users\Misha\Stuff\boost\HEAD\boost\tools\regression\test>C:\Users\Misha\Stuff\boost\HEAD\bin\..\boost\tools\jam\src\bin.ntx86\bjam.exe --dump-tests --v2 msvc-7.1 "-sBOOST_BUILD_PATH=C:\Users\Misha\Stuff\boost\HEAD\bin\.." "-sBOOST_ROOT="C:\Users\Misha\Stuff\boost\HEAD\bin\..\boost"
boost-test(RUN) "testlib/run~pass" : "tools/regression/test/run~pass.cpp"
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.obj
run~pass.cpp
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
cl /Zm800 -nologo @"..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.obj.rsp"
...failed compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.obj...
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static
MkDir1 ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi
compile-c-c++ ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.obj
run~pass2.cpp
msvc.link ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" >nul
link /NOLOGO /INCREMENTAL:NO /DEBUG /subsystem:console /out:"..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe" @"..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe.rsp"
if errorlevel 1 exit %errorlevel%
if exist "..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe.manifest" (
mt -nologo -manifest "..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe.manifest" "-outputresource:..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe;1"
)
...failed msvc.link ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe...
...removing ..\..\..\bin.v2\tools\regression\test\run~pass2.test\msvc-7.1\debug\link-static\threading-multi\run~pass2.exe
...failed updating 9 targets...
...skipped 17 targets...
...updated 131 targets...
View File
@@ -1,9 +0,0 @@
<test-log library="" test-name="run~pass" test-type="" test-program="" target-directory="tools/regression/test/run~pass.test/msvc-7.1/debug/link-static/threading-multi" toolset="msvc-7.1" show-run-output="false">
<compile result="fail" timestamp="xxx">
run~pass.cpp
call "C:\Program Files\Microsoft Visual Studio .NET 2003\Vc7\bin\vcvars32.bat" &gt;nul
cl /Zm800 -nologo @"C:\users\Misha\Stuff\boost\boost\tools\regression\test\test-cases\incremental\actual\tools\regression\test\run~pass.test\msvc-7.1\debug\link-static\threading-multi\run~pass.obj.rsp"
</compile>
</test-log>
View File
@@ -1,19 +0,0 @@
rem Copyright Beman Dawes 2005
rem Distributed under the Boost Software License, Version 1.0.
rem See http://www.boost.org/LICENSE_1_0.txt
echo Begin build...
pushd ..\build
b2 variant=release install >build.log 2>&1
start build.log
popd
echo Begin test processing...
b2 --dump-tests %* >test.log 2>&1
echo Begin log processing...
..\build\bin\process_jam_log --v2 <test.log
start test.log
echo Begin compiler status processing...
..\build\bin\compiler_status --v2 . test_status.html test_links.html
start test_status.html
View File
@@ -1,181 +0,0 @@
# Copyright (c) MetaCommunications, Inc. 2003-2005
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import difflib
import os
import re
import shutil
import string
import sys
def scan_for_test_cases():
return [ os.path.join( "test-cases", x ) for x in os.listdir( "test-cases" ) if x != "CVS" ]
def clean_dir( dir ):
if os.path.exists( dir ):
shutil.rmtree( dir )
os.makedirs( dir )
def system( commands ):
if sys.platform == 'win32':
f = open( 'tmp.cmd', 'w' )
f.write( string.join( commands, '\n' ) )
f.close()
rc = os.system( 'tmp.cmd' )
os.unlink( 'tmp.cmd' )
return rc
else:
rc = os.system( '&&'.join( commands ) )
return rc
def checked_system( commands, valid_return_codes = [ 0 ] ):
rc = system( commands )
if rc not in [ 0 ] + valid_return_codes:
raise Exception( 'Command sequence "%s" failed with return code %d' % ( commands, rc ) )
return rc
def list_recursively( dir ):
r = []
for root, dirs, files in os.walk( dir, topdown=False ):
root = root[ len( dir ) + 1 : ]
r.extend( [ os.path.join( root, x ) for x in dirs ] )
r.extend( [ os.path.join( root, x ) for x in files ] )
return r
def find_process_jam_log():
root = "../../../"
for root, dirs, files in os.walk( os.path.join( root, "bin.v2" ), topdown=False ):
if "process_jam_log.exe" in files:
return os.path.abspath( os.path.normpath( os.path.join( root, "process_jam_log.exe" ) ) )
if "process_jam_log" in files:
return os.path.abspath( os.path.normpath( os.path.join( root, "process_jam_log" ) ) )
return None
def process_jam_log( executable, file, locate_root, results_dir ):
args = []
args.append( executable )
# args.append( '--echo' )
args.append( '--create-directories' )
args.append( '--v2' )
args.append( locate_root )
args.append( '<' )
args.append( file )
cmd = " ".join( args )
print "Running process_jam_log (%s)" % cmd
checked_system( [ cmd ] )
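# For exposition (comment added here, not in the original script; the argument
# values are whatever the caller passes in): the assembled command has the form
#   <executable> --create-directories --v2 <locate_root> < <file>
# so the captured bjam log is fed to process_jam_log on standard input.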
def read_file( file_path ):
f = open( file_path )
try:
return f.read()
finally:
f.close()
def remove_timestamps( log_lines ):
return [ re.sub( "timestamp=\"[^\"]+\"", "timestamp=\"\"", x ) for x in log_lines ]
def determine_locate_root( bjam_log ):
locate_root = None
f = open( 'bjam.log' )
try:
locate_root_re = re.compile( r'locate-root\s+"(.*)"' )
for l in f.readlines():
m = locate_root_re.match( l )
if m:
locate_root = m.group(1)
break
finally:
f.close()
return locate_root
def read_file( path ):
f = open( path )
try:
return f.read()
finally:
f.close()
def read_file_lines( path ):
f = open( path )
try:
return f.readlines()
finally:
f.close()
def write_file( path, content ):
f = open( path, 'w' )
try:
return f.write( content )
finally:
f.close()
def write_file_lines( path, content ):
f = open( path, 'w' )
try:
return f.writelines( content )
finally:
f.close()
def run_test_cases( test_cases ):
process_jam_log_executable = find_process_jam_log()
print 'Found process_jam_log: %s' % process_jam_log_executable
initial_dir = os.getcwd()
for test_case in test_cases:
os.chdir( initial_dir )
print 'Running test case "%s"' % test_case
os.chdir( test_case )
if os.path.exists( "expected" ):
locate_root = determine_locate_root( 'bjam.log' )
print 'locate_root: %s' % locate_root
actual_results_dir = os.path.join( test_case, "actual" )
clean_dir( "actual" )
os.chdir( "actual" )
root = os.getcwd()
i = 0
while 1:
if i == 0:
bjam_log_file = 'bjam.log'
else:
bjam_log_file = 'bjam.log.%0d' % i
i += 1
print 'Looking for %s' % bjam_log_file
if not os.path.exists( os.path.join( '..', bjam_log_file ) ):
print '  does not exist'
break
print ' found'
write_file_lines(bjam_log_file.replace( 'bjam', 'bjam_' ),
[ x.replace( locate_root, root ) for x in read_file_lines( os.path.join( '..', bjam_log_file ) ) ] )
process_jam_log( executable = process_jam_log_executable
, results_dir = "."
, locate_root = root
, file=bjam_log_file.replace( 'bjam', 'bjam_' ) )
actual_content = list_recursively( "." )
actual_content.sort()
result_xml = []
for test_log in [ x for x in actual_content if os.path.splitext( x )[1] == '.xml' ]:
print 'reading %s' % test_log
result = [ re.sub( r'timestamp="(.*)"', 'timestamp="xxx"', x ) for x in read_file_lines( test_log ) ]
result_xml.extend( result )
write_file_lines( 'results.xml', result_xml )
os.chdir( '..' )
assert read_file( 'expected/results.xml' ) == read_file( 'actual/results.xml' )
os.chdir( '..' )
else:
raise Exception( 'Test case "%s" does not contain the expected results directory ("expected")' % ( test_case ) )
run_test_cases( scan_for_test_cases() )
# print find_process_jam_log()
View File
@@ -7,15 +7,16 @@
exe process_jam_log
:
../src/process_jam_log.cpp
../../common/build//tiny_xml
/boost/filesystem//boost_filesystem/<link>static
:
<define>BOOST_ALL_NO_LIB=1
<define>_CRT_SECURE_NO_WARNINGS
<implicit-dependency>/boost//headers
../../common/build//process_jam_log
;
explicit process_jam_log ;
alias install : bin ;
install bin : process_jam_log/<variant>release ;
explicit install bin ;
alias install : exec ;
install exec
:
process_jam_log/<variant>release
:
<install-type>EXE
<location>$(INSTALL_PREFIX_EXEC)
;
explicit install exec ;
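# Note added for exposition (not part of the original Jamfile): with this target,
# an invocation along the lines of `b2 install` is expected to stage the release
# build of process_jam_log under $(INSTALL_PREFIX_EXEC), which is assumed to be
# supplied by the enclosing build configuration.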
View File
@@ -1,600 +1,18 @@
// process jam regression test output into XML -----------------------------//
// Copyright Beman Dawes 2002. Distributed under the Boost
// Copyright Beman Dawes 2002.
// Copyright Rene Rivera 2015.
// Distributed under the Boost
// Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org/tools/regression for documentation.
#define BOOST_FILESYSTEM_VERSION 3
#include <boost/config/warning_disable.hpp>
#include "tiny_xml.hpp"
#include "common.hpp"
#include "boost/filesystem/operations.hpp"
#include "boost/filesystem/fstream.hpp"
#include "boost/filesystem/exception.hpp"
#include "boost/filesystem/convenience.hpp"
#include <iostream>
#include <string>
#include <cstring>
#include <map>
#include <utility> // for make_pair
#include <ctime>
#include <cctype> // for tolower
#include <cstdlib> // for exit
#include <vector>
using std::string;
namespace xml = boost::tiny_xml;
namespace fs = boost::filesystem;
// options
static bool echo = false;
static bool create_dirs = false;
static bool boost_build_v2 = true;
namespace
{
struct test_info
{
string file_path; // relative boost-root
string type;
bool always_show_run_output;
};
typedef std::map< string, test_info > test2info_map; // key is test-name
test2info_map test2info;
fs::path boost_root;
fs::path locate_root; // ALL_LOCATE_TARGET (or boost_root if none)
// append_html -------------------------------------------------------------//
void append_html( const string & src, string & target )
{
// there are a few lines we want to ignore
if ( src.find( "th target..." ) != string::npos
|| src.find( "cc1plus.exe: warning: changing search order for system directory" ) != string::npos
|| src.find( "cc1plus.exe: warning: as it has already been specified as a non-system directory" ) != string::npos
) return;
// on some platforms (e.g. tru64cxx) the following line is a real performance boost
target.reserve(src.size() * 2 + target.size());
for ( string::size_type pos = 0; pos < src.size(); ++pos )
{
if ( src[pos] == '<' ) target += "&lt;";
else if ( src[pos] == '>' ) target += "&gt;";
else if ( src[pos] == '&' ) target += "&amp;";
else target += src[pos];
}
}
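// Illustrative example (comment added for exposition, not in the original source):
// append_html( "x<y && y>z", target ) appends "x&lt;y &amp;&amp; y&gt;z" to target,
// while a source line containing "...on 100th target..." is ignored outright.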
// timestamp ---------------------------------------------------------------//
string timestamp()
{
char run_date[128];
std::time_t tod;
std::time( &tod );
std::strftime( run_date, sizeof(run_date),
"%Y-%m-%d %X UTC", std::gmtime( &tod ) );
return string( run_date );
}
// convert path separators to forward slashes ------------------------------//
void convert_path_separators( string & s )
{
for ( string::iterator itr = s.begin(); itr != s.end(); ++itr )
if ( *itr == '\\' || *itr == '!' )
*itr = '/';
}
// trim_left ----------------------------------------------------------------//
std::string trim_left( std::string const& s )
{
std::string::size_type const pos( s.find_first_not_of(' ') );
return pos != std::string::npos
? s.substr( pos, s.size() - pos + 1 )
: ""
;
}
// split --------------------------------------------------------------------//
std::vector<std::string> split( std::string const& s )
{
std::string::size_type const pos( s.find_first_of(' ') );
std::vector<std::string> result( 1, s.substr( 0, pos ) );
if ( pos == std::string::npos )
return result;
std::vector<std::string> const rest( split( trim_left( s.substr( pos, s.size() - pos + 1 ) ) ) );
result.insert( result.end(), rest.begin(), rest.end() );
return result;
}
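// Example added for exposition (not in the original source): trim_left( "  a b" )
// yields "a b", and split( "a b  c" ) yields { "a", "b", "c" } -- repeated spaces
// are absorbed by the recursive trim_left/split combination.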
string test_path_to_library_name( string const& path );
string test_name( const string & s );
// extract a target directory path from a jam target string --------------------------//
// s may be relative to the initial_path:
// ..\..\..\libs\foo\build\bin\libfoo.lib\vc7\debug\runtime-link-dynamic\boo.obj
// s may be absolute:
// d:\myboost\libs\foo\build\bin\libfoo.lib\vc7\debug\runtime-link-dynamic\boo.obj
// return path is always relative to the boost directory tree:
// libs/foo/build/bin/libfoo.lib/vc7/debug/runtime-link-dynamic
string target_directory( const string & s )
{
string temp( s );
convert_path_separators( temp );
temp.erase( temp.find_last_of( "/" ) ); // remove leaf
temp = split( trim_left( temp ) ).back();
if ( temp[0] == '.' )
temp.erase( 0, temp.find_first_not_of( "./" ) );
else
temp.erase( 0, locate_root.string().size()+1 );
std::string testid = test_path_to_library_name( temp ) + "/" + test_name( temp );
test2info_map::const_iterator info = test2info.find(testid);
if ( info != test2info.end() )
{
// Only keep path components that are part of the
// build variant.
string source_directory = info->second.file_path;
string::size_type last = source_directory.find_last_of( "/" );
if ( last == string::npos )
source_directory.clear();
else
source_directory.erase( last );
// find the start of the shared tail
string::size_type source_pos = source_directory.size(), temp_pos = temp.size();
for ( ; source_pos != 0 && temp_pos != 0; --source_pos, --temp_pos )
{
if ( source_directory[ source_pos - 1 ] != temp[ temp_pos - 1 ] )
break;
}
// erase all path components in the shared tail
temp_pos = temp.find( '/', temp_pos );
if ( temp_pos != string::npos )
temp.erase( temp_pos );
}
if ( echo )
std::cout << "\ttarget_directory( \"" << s << "\") -> \"" << temp << "\"" << std::endl;
return temp;
}
string::size_type target_name_end( const string & s )
{
string::size_type pos = s.find( ".test/" );
if ( pos == string::npos ) pos = s.find( ".dll/" );
if ( pos == string::npos ) pos = s.find( ".so/" );
if ( pos == string::npos ) pos = s.find( ".lib/" );
if ( pos == string::npos ) pos = s.find( ".pyd/" );
if ( pos == string::npos ) pos = s.find( ".a/" );
return pos;
}
string toolset( const string & s )
{
string::size_type pos = target_name_end( s );
if ( pos == string::npos )
pos = s.find( "build/" );
if ( pos == string::npos )
return "";
pos = s.find( "/", pos ) + 1;
return s.substr( pos, s.find( "/", pos ) - pos );
}
string test_name( const string & s )
{
string::size_type pos = target_name_end( s );
if ( pos == string::npos )
return "";
string::size_type pos_start = s.rfind( '/', pos ) + 1;
return s.substr( pos_start,
(s.find( ".test/" ) != string::npos
? pos : s.find( "/", pos )) - pos_start );
}
// Take a path to a target directory of test, and
// returns library name corresponding to that path.
string test_path_to_library_name( string const& path )
{
std::string result;
string::size_type start_pos( path.find( "libs/" ) );
if ( start_pos == string::npos )
{
start_pos = path.find( "tools/" );
}
if ( start_pos != string::npos )
{
// The path format is ...libs/functional/hash/test/something.test/....
// So, the part between "libs" and "test/something.test" can be considered
// as library name. But, for some libraries tests are located too deep,
// say numeric/ublas/test/test1 directory, and some libraries have tests
// in several subdirectories (regex/example and regex/test). So, nested
// directory may belong to several libraries.
// To disambiguate, it's possible to place a 'sublibs' file in
// a directory. It means that child directories are separate libraries.
// It's still possible to have tests in the directory that has 'sublibs'
// file.
std::string interesting;
start_pos = path.find( '/', start_pos ) + 1;
string::size_type end_pos( path.find( ".test/", start_pos ) );
end_pos = path.rfind('/', end_pos);
if (path.substr(end_pos - 5, 5) == "/test")
interesting = path.substr( start_pos, end_pos - 5 - start_pos );
else
interesting = path.substr( start_pos, end_pos - start_pos );
// Take slash separate elements until we have corresponding 'sublibs'.
end_pos = 0;
for(;;)
{
end_pos = interesting.find('/', end_pos);
if (end_pos == string::npos)
{
result = interesting;
break;
}
result = interesting.substr(0, end_pos);
if ( fs::exists( ( boost_root / "libs" ) / result / "sublibs" ) )
{
end_pos = end_pos + 1;
}
else
break;
}
}
// Strip off a trailing /test
// This needs to be applied after the sublibs search.
if(result.size() >= 5 && result.substr(result.size() - 5, 5) == "/test")
result = result.substr(0, result.size() - 5);
return result;
}
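// Worked example (comment added for exposition; the layout is hypothetical): for
// "libs/functional/hash/test/something.test/msvc-7.1/debug" the candidate text
// between "libs/" and the test directory is "functional/hash". Without a 'sublibs'
// marker the first component, "functional", is returned; if libs/functional holds
// a 'sublibs' file, the walk continues and the result is "functional/hash".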
// Tries to find target name in the string 'msg', starting from
// position start.
// If found, extract the directory name from the target name and
// stores it in 'dir', and return the position after the target name.
// Otherwise, returns string::npos.
string::size_type parse_skipped_msg_aux(const string& msg,
string::size_type start,
string& dir)
{
dir.clear();
string::size_type start_pos = msg.find( '<', start );
if ( start_pos == string::npos )
return string::npos;
++start_pos;
string::size_type end_pos = msg.find( '>', start_pos );
dir += msg.substr( start_pos, end_pos - start_pos );
if ( boost_build_v2 )
{
// The first letter is a magic value indicating
// the type of grist.
convert_path_separators( dir );
dir.erase( 0, 1 );
// We need path from root, not from 'status' dir.
if (dir.find("../") == 0)
dir.erase(0,3);
else // dir is always relative to the boost directory tree
dir.erase( 0, locate_root.string().size()+1 );
}
else
{
if ( dir[0] == '@' )
{
// new style build path, rooted build tree
convert_path_separators( dir );
dir.replace( 0, 1, "bin/" );
}
else
{
// old style build path, integrated build tree
start_pos = dir.rfind( '!' );
convert_path_separators( dir );
string::size_type path_sep_pos = dir.find( '/', start_pos + 1 );
if ( path_sep_pos != string::npos )
dir.insert( path_sep_pos, "/bin" );
else
{
// see http://article.gmane.org/gmane.comp.lib.boost.devel/146688;
// the following code assumes that: a) 'dir' is not empty,
// b) 'end_pos != string::npos' and c) 'msg' always ends with '...'
if ( dir[dir.size() - 1] == '@' )
dir += "/" + msg.substr( end_pos + 1, msg.size() - end_pos - 1 - 3 );
}
}
}
return end_pos;
}
// the format of paths is really kinky, so convert to normal form
// first path is missing the leading "..\".
// first path is missing "\bin" after "status".
// second path is missing the leading "..\".
// second path is missing "\bin" after "build".
// second path uses "!" for some separators.
void parse_skipped_msg( const string & msg,
string & first_dir, string & second_dir )
{
string::size_type pos = parse_skipped_msg_aux(msg, 0, first_dir);
if (pos == string::npos)
return;
parse_skipped_msg_aux(msg, pos, second_dir);
}
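// Illustrative example (comment added for exposition), based on the kind of line
// bjam emits, e.g.
//   ...skipped <p...\run-fail~compile-fail.test\...>run-fail~compile-fail.exe
//   for lack of <p...>run-fail~compile-fail.obj...
// the two <...> groups are parsed in turn: the leading grist letter ('p') is
// stripped, path separators are normalised to forward slashes, and the directory
// text (after the relative-path adjustments above) is returned in first_dir and
// second_dir respectively.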
string revision(const string & test_pgm)
{
if (test_pgm.empty())
return std::string();
std::string sha;
fs::path p(boost_root / test_pgm);
p.remove_filename();
fs::path cp(fs::current_path());
fs::current_path(p);
std::system("git rev-parse --short=6 HEAD >.short-sha");
std::fstream file(".short-sha");
file >> sha;
fs::current_path(cp);
//std::cout << "***" << sha << std::endl;
return sha;
}
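// For exposition (comment added, not in the original source): this shells out to
// git in the test program's source directory and reads back a six-character
// abbreviated commit hash, which is later recorded in the test-log "revision"
// attribute.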
// test_log hides database details ---------------------------------------------------//
class test_log
: boost::noncopyable
{
const string & m_target_directory;
xml::element_ptr m_root;
public:
test_log( const string & target_directory,
const string & test_name,
const string & toolset,
bool force_new_file )
: m_target_directory( target_directory )
{
if ( !force_new_file )
{
fs::path pth( locate_root / target_directory / "test_log.xml" );
fs::ifstream file( pth );
if ( file ) // existing file
{
try
{
m_root = xml::parse( file, pth.string() );
return;
}
catch(...)
{
// unable to parse existing XML file, fall through
}
}
}
string library_name( test_path_to_library_name( target_directory ) );
test_info info;
test2info_map::iterator itr( test2info.find( library_name + "/" + test_name ) );
if ( itr != test2info.end() )
info = itr->second;
if ( !info.file_path.empty() )
library_name = test_path_to_library_name( info.file_path );
if ( info.type.empty() )
{
if ( target_directory.find( ".lib/" ) != string::npos
|| target_directory.find( ".dll/" ) != string::npos
|| target_directory.find( ".so/" ) != string::npos
|| target_directory.find( ".dylib/" ) != string::npos
|| target_directory.find( "/build/" ) != string::npos
)
{
info.type = "lib";
}
else if ( target_directory.find( ".pyd/" ) != string::npos )
info.type = "pyd";
}
m_root.reset( new xml::element( "test-log" ) );
m_root->attributes.push_back(
xml::attribute("library", library_name));
m_root->attributes.push_back(
xml::attribute("revision", revision(info.file_path)));
m_root->attributes.push_back(
xml::attribute( "test-name", test_name ) );
m_root->attributes.push_back(
xml::attribute( "test-type", info.type ) );
m_root->attributes.push_back(
xml::attribute( "test-program", info.file_path ) );
m_root->attributes.push_back(
xml::attribute( "target-directory", target_directory ) );
m_root->attributes.push_back(
xml::attribute( "toolset", toolset ) );
m_root->attributes.push_back(
xml::attribute( "show-run-output",
info.always_show_run_output ? "true" : "false" ) );
}
~test_log()
{
fs::path pth( locate_root / m_target_directory / "test_log.xml" );
if ( create_dirs && !fs::exists( pth.branch_path() ) )
fs::create_directories( pth.branch_path() );
fs::ofstream file( pth );
if ( !file )
{
std::cout << "*****Warning - can't open output file: "
<< pth.string() << "\n";
}
else xml::write( *m_root, file );
}
const string & target_directory() const { return m_target_directory; }
void remove_action( const string & action_name )
// no effect if action_name not found
{
xml::element_list::iterator itr;
for ( itr = m_root->elements.begin();
itr != m_root->elements.end() && (*itr)->name != action_name;
++itr ) {}
if ( itr != m_root->elements.end() )
m_root->elements.erase( itr );
}
void add_action( const string & action_name,
const string & result,
const string & timestamp,
const string & content )
{
remove_action( action_name );
xml::element_ptr action( new xml::element(action_name) );
m_root->elements.push_back( action );
action->attributes.push_back( xml::attribute( "result", result ) );
action->attributes.push_back( xml::attribute( "timestamp", timestamp ) );
action->content = content;
}
};
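// Shape of the emitted file, for exposition (comment added; attribute values are
// placeholders):
//   <test-log library="..." test-name="..." toolset="..." show-run-output="false">
//     <compile result="succeed" timestamp="...">...</compile>
//   </test-log>
// i.e. one <test-log> root carrying the attributes set in the constructor, with
// one child element per recorded action.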
// message_manager maps input messages into test_log actions ---------------//
class message_manager
: boost::noncopyable
{
string m_action_name; // !empty() implies action pending
// IOW, a start_message awaits stop_message
string m_target_directory;
string m_test_name;
string m_toolset;
bool m_note; // if true, run result set to "note"
// set false by start_message()
// data needed to stop further compile action after a compile failure
// detected in the same target directory
string m_previous_target_directory;
bool m_compile_failed;
public:
message_manager() : m_note(false) {}
~message_manager() { /*assert( m_action_name.empty() );*/ }
bool note() const { return m_note; }
void note( bool value ) { m_note = value; }
void start_message( const string & action_name,
const string & target_directory,
const string & test_name,
const string & toolset,
const string & prior_content )
{
assert( !target_directory.empty() );
if ( !m_action_name.empty() )
stop_message( prior_content );
m_action_name = action_name;
m_target_directory = target_directory;
m_test_name = test_name;
m_toolset = toolset;
m_note = false;
if ( m_previous_target_directory != target_directory )
{
m_previous_target_directory = target_directory;
m_compile_failed = false;
}
}
void stop_message( const string & content )
{
if ( m_action_name.empty() )
return;
stop_message( m_action_name, m_target_directory,
"succeed", timestamp(), content );
}
void stop_message( const string & action_name,
const string & target_directory,
const string & result,
const string & timestamp,
const string & content )
// the only valid action_names are "compile", "link", "run", "lib"
{
// My understanding of the jam output is that there should never be
// a stop_message that was not preceded by a matching start_message.
// That understanding is built into message_manager code.
assert( m_action_name == action_name );
assert( m_target_directory == target_directory );
assert( result == "succeed" || result == "fail" );
// if test_log.xml entry needed
if ( !m_compile_failed
|| action_name != "compile"
|| m_previous_target_directory != target_directory )
{
if ( action_name == "compile" && result == "fail" )
m_compile_failed = true;
test_log tl( target_directory,
m_test_name, m_toolset, action_name == "compile" );
tl.remove_action( "lib" ); // always clear out lib residue
// dependency removal
if ( action_name == "lib" )
{
tl.remove_action( "compile" );
tl.remove_action( "link" );
tl.remove_action( "run" );
}
else if ( action_name == "compile" )
{
tl.remove_action( "link" );
tl.remove_action( "run" );
if ( result == "fail" )
m_compile_failed = true;
}
else if ( action_name == "link" )
{
tl.remove_action( "run" );
}
// dependency removal won't work right with random names, so assert
else { assert( action_name == "run" ); }
// add the "run" stop_message action
tl.add_action( action_name,
result == "succeed" && note() ? std::string("note") : result,
timestamp, content );
}
m_action_name = ""; // signal no pending action
m_previous_target_directory = target_directory;
}
};
}
extern int process_jam_log( const std::vector<std::string> & args );
// main --------------------------------------------------------------------//
@@ -602,344 +20,11 @@ namespace
int main( int argc, char ** argv )
{
// Turn off synchronization with corresponding C standard library files. This
// gives a significant speed improvement on platforms where the standard C++
// streams are implemented using standard C files.
std::ios::sync_with_stdio(false);
fs::initial_path();
std::istream* input = 0;
if ( argc <= 1 )
{
std::cout << "process_jam_log [--echo] [--create-directories] [--v1|--v2]\n"
" [--boost-root boost_root] [--locate-root locate_root]\n"
" [--input-file input_file]\n"
" [locate-root]\n"
"--echo - verbose diagnostic output.\n"
"--create-directories - if the directory for xml file doesn't exists - creates it.\n"
" usually used for processing logfile on different machine\n"
"--v2 - bjam version 2 used (default).\n"
"--v1 - bjam version 1 used.\n"
"--boost-root - the root of the boost installation being used. If not defined\n"
" assume to run from within it and discover it heuristically.\n"
"--locate-root - the same as the bjam ALL_LOCATE_TARGET\n"
" parameter, if any. Default is boost-root.\n"
"--input-file - the output of a bjam --dump-tests run. Default is std input.\n"
;
return 1;
}
std::vector<std::string> args;
while ( argc > 1 )
{
if ( std::strcmp( argv[1], "--echo" ) == 0 )
{
echo = true;
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--create-directories" ) == 0 )
{
create_dirs = true;
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--v2" ) == 0 )
{
boost_build_v2 = true;
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--v1" ) == 0 )
{
boost_build_v2 = false;
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--boost-root" ) == 0 )
{
--argc; ++argv;
if ( argc == 1 )
{
std::cout << "Abort: option --boost-root requires a directory argument\n";
std::exit(1);
}
boost_root = fs::path( argv[1] );
if ( !boost_root.is_complete() )
boost_root = ( fs::initial_path() / boost_root ).normalize();
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--locate-root" ) == 0 )
{
--argc; ++argv;
if ( argc == 1 )
{
std::cout << "Abort: option --locate-root requires a directory argument\n";
std::exit(1);
}
locate_root = fs::path( argv[1] );
--argc; ++argv;
}
else if ( std::strcmp( argv[1], "--input-file" ) == 0 )
{
--argc; ++argv;
if ( argc == 1 )
{
std::cout << "Abort: option --input-file requires a filename argument\n";
std::exit(1);
}
input = new std::ifstream(argv[1]);
--argc; ++argv;
}
else if ( *argv[1] == '-' )
{
std::cout << "Abort: unknown option; invoke with no arguments to see list of valid options\n";
return 1;
}
else
{
locate_root = fs::path( argv[1] );
--argc; ++argv;
}
args.push_back( argv[1] );
--argc; ++argv;
}
if ( boost_root.empty() )
{
boost_root = boost::regression_tools::boost_root_path();
if ( boost_root.empty() )
{
std::cout << "Abort: not able to locate the boost root\n";
return 1;
}
boost_root.normalize();
}
if ( locate_root.empty() )
{
locate_root = boost_root;
}
else if ( !locate_root.is_complete() )
{
locate_root = ( fs::initial_path() / locate_root ).normalize();
}
if ( input == 0 )
{
input = &std::cin;
}
std::cout << "boost_root: " << boost_root.string() << '\n'
<< "locate_root: " << locate_root.string() << '\n';
message_manager mgr;
string line;
string content;
bool capture_lines = false;
// This loop looks at lines for certain signatures, and accordingly:
// * Calls start_message() to start capturing lines. (start_message() will
// automatically call stop_message() if needed.)
// * Calls stop_message() to stop capturing lines.
// * Capture lines if line capture on.
static const int max_line_length = 8192;
int line_num = 0;
while ( std::getline( *input, line ) )
{
if (max_line_length < line.size())
line = line.substr(0, max_line_length);
++line_num;
std::vector<std::string> const line_parts( split( line ) );
std::string const line_start( line_parts[0] != "...failed"
? line_parts[0]
: line_parts[0] + " " + line_parts[1]
);
if ( echo )
{
std::cout
<< "line " << line_num << ": " << line << "\n"
<< "\tline_start: " << line_start << "\n";
}
// create map of test-name to test-info
if ( line_start.find( "boost-test(" ) == 0 )
{
string::size_type pos = line.find( '"' );
string test_name( line.substr( pos+1, line.find( '"', pos+1)-pos-1 ) );
test_info info;
info.always_show_run_output
= line.find( "\"always_show_run_output\"" ) != string::npos;
info.type = line.substr( 11, line.find( ')' )-11 );
for (unsigned int i = 0; i!=info.type.size(); ++i )
{ info.type[i] = std::tolower( info.type[i] ); }
pos = line.find( ':' );
// the rest of line is missing if bjam didn't know how to make target
if ( pos + 1 != line.size() )
{
info.file_path = line.substr( pos+3,
line.find( "\"", pos+3 )-pos-3 );
convert_path_separators( info.file_path );
if ( info.file_path.find( "libs/libs/" ) == 0 ) info.file_path.erase( 0, 5 );
if ( test_name.find( "/" ) == string::npos )
test_name = "/" + test_name;
test2info.insert( std::make_pair( test_name, info ) );
// std::cout << test_name << ", " << info.type << ", " << info.file_path << "\n";
}
else
{
std::cout << "*****Warning - missing test path: " << line << "\n"
<< " (Usually occurs when bjam doesn't know how to make a target)\n";
}
continue;
}
// these actions represent both the start of a new action
// and the end of a failed action
else if ( line_start.find( "C++-action" ) != string::npos
|| line_start.find( "vc-C++" ) != string::npos
|| line_start.find( "C-action" ) != string::npos
|| line_start.find( "Cc-action" ) != string::npos
|| line_start.find( "vc-Cc" ) != string::npos
|| line_start.find( ".compile.") != string::npos
|| line_start.find( "compile-") != string::npos
|| line_start.find( "-compile") != string::npos
|| line_start.find( "Link-action" ) != string::npos
|| line_start.find( "vc-Link" ) != string::npos
|| line_start.find( "Archive-action" ) != string::npos
|| line_start.find( ".archive") != string::npos
|| ( line_start.find( ".link") != string::npos &&
// .linkonce is present in gcc linker messages about
// unresolved symbols. We don't have to parse those
line_start.find( ".linkonce" ) == string::npos )
)
{
//~ if ( !test2info.size() )
//~ {
//~ std::cout << "*****Error - No \"boost-test\" lines encountered.\n"
//~ " (Usually occurs when bjam was invoked without the --dump-tests option\n"
//~ " or bjam was invoked in the wrong directory)\n";
//~ return 1;
//~ }
string action( ( line_start.find( "Link-action" ) != string::npos
|| line_start.find( "vc-Link" ) != string::npos
|| line_start.find( "Archive-action" ) != string::npos
|| line_start.find( ".archive") != string::npos
|| line_start.find( ".link") != string::npos
)
? "link" : "compile"
);
if ( line_start.find( "...failed " ) != string::npos )
{
mgr.stop_message( action, target_directory( line ),
"fail", timestamp(), content );
}
else
{
string target_dir( target_directory( line ) );
mgr.start_message( action, target_dir,
test_name( target_dir ), toolset( target_dir ), content );
}
content = "\n";
capture_lines = true;
}
// these actions are only used to stop the previous action
else if ( line_start.find( "-Archive" ) != string::npos
|| line_start.find( "MkDir" ) == 0
|| line_start.find( "common.mkdir" ) == 0
|| line_start.find( ".manifest" ) != string::npos )
{
mgr.stop_message( content );
content.clear();
capture_lines = false;
}
else if ( line_start.find( "execute-test" ) != string::npos
|| line_start.find( "capture-output" ) != string::npos )
{
if ( line_start.find( "...failed " ) != string::npos )
{
mgr.stop_message( "run", target_directory( line ),
"fail", timestamp(), content );
content = "\n";
capture_lines = true;
}
else
{
string target_dir( target_directory( line ) );
mgr.start_message( "run", target_dir,
test_name( target_dir ), toolset( target_dir ), content );
// contents of .output file for content
capture_lines = false;
content = "\n";
fs::ifstream file( locate_root / target_dir
/ (test_name(target_dir) + ".output") );
if ( file )
{
string ln;
while ( std::getline( file, ln ) )
{
if ( ln.find( "<note>" ) != string::npos ) mgr.note( true );
append_html( ln, content );
content += "\n";
}
}
}
}
// bjam indicates some prior dependency failed by a "...skipped" message
else if ( line_start.find( "...skipped" ) != string::npos
&& line.find( "<directory-grist>" ) == string::npos
)
{
mgr.stop_message( content );
content.clear();
capture_lines = false;
if ( line.find( " for lack of " ) != string::npos )
{
capture_lines = ( line.find( ".run for lack of " ) == string::npos );
string target_dir;
string lib_dir;
parse_skipped_msg( line, target_dir, lib_dir );
if ( target_dir != lib_dir ) // it's a lib problem
{
mgr.start_message( "lib", target_dir,
test_name( target_dir ), toolset( target_dir ), content );
content = lib_dir;
mgr.stop_message( "lib", target_dir, "fail", timestamp(), content );
content = "\n";
}
}
}
else if ( line_start.find( "**passed**" ) != string::npos
|| line_start.find( "failed-test-file" ) != string::npos
|| line_start.find( "command-file-dump" ) != string::npos )
{
mgr.stop_message( content );
content = "\n";
capture_lines = true;
}
else if ( capture_lines ) // hang onto lines for possible later use
{
append_html( line, content );
content += "\n";
}
}
mgr.stop_message( content );
if (input != &std::cin)
delete input;
return 0;
return process_jam_log( args );
}


@@ -1,27 +0,0 @@
This folder keeps the scripts that produce the Boost regression test tables.
The entry point is the boost_wide_report.py script. In the simplest
case, it should be run as:
python boost_wide_report.py
--locate-root=XXX
--results-dir=YYY
--tag trunk
--expected-results=XXX
--failures-markup=XXX
The 'trunk' value is the tag of what is being tested, and should match the
directory name on the server that keeps the uploaded individual results.
'results-dir' is the directory into which the individual results (zip files)
are downloaded and then processed. expected-results and failures-markup
should be paths to the corresponding files in the 'status' subdirectory of
the boost tree. locate-root should point at the boost root; it's unclear
whether it is of any use now.
This invocation will download and process *all* test results, but it will
not upload them, so it is good for local testing. It is possible to run the
command, interrupt it while it processes results, leave just a few .zip
files in the results dir, and then re-run with the --dont-collect-logs
option to use only the already downloaded zips, as sketched below.
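For example, such a re-run, reusing the placeholder paths from above together
with the --dont-collect-logs option described here, would look something like:
python boost_wide_report.py
--locate-root=XXX
--results-dir=YYY
--tag trunk
--expected-results=XXX
--failures-markup=XXX
--dont-collect-logs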


@@ -1,893 +0,0 @@
# Copyright (c) MetaCommunications, Inc. 2003-2007
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import shutil
import codecs
import xml.sax.handler
import xml.sax.saxutils
import glob
import re
import os.path
import os
import string
import time
import sys
import ftplib
import utils
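# The report type codes below mirror the list in usage() near the end of this
# file: us/ds are the user/developer summaries, ud/dd the user/developer
# detailed pages, l the links page, p the patches page, i the issues list,
# n the runner comment files, and the trailing-r variants (ddr, dsr, udr, usr)
# select the release-mode versions of the detailed and summary pages.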
report_types = [ 'us', 'ds', 'ud', 'dd', 'l', 'p', 'i', 'n', 'ddr', 'dsr', 'udr', 'usr' ]
if __name__ == '__main__':
run_dir = os.path.abspath( os.path.dirname( sys.argv[ 0 ] ) )
else:
run_dir = os.path.abspath( os.path.dirname( sys.modules[ __name__ ].__file__ ) )
def map_path( path ):
return os.path.join( run_dir, path )
def xsl_path( xsl_file_name ):
return map_path( os.path.join( 'xsl/v2', xsl_file_name ) )
class file_info:
def __init__( self, file_name, file_size, file_date ):
self.name = file_name
self.size = file_size
self.date = file_date
def __repr__( self ):
return "name: %s, size: %s, date %s" % ( self.name, self.size, self.date )
#
# Find the mod time from unix format directory listing line
#
def get_date( f, words ):
# f is an ftp object
(response, modtime) = f.sendcmd('MDTM %s' % words[-1]).split( None, 2 )
year = int( modtime[0:4] )
month = int( modtime[4:6] )
day = int( modtime[6:8] )
hours = int( modtime[8:10] )
minutes = int( modtime[10:12] )
seconds = int( modtime[12:14] )
return ( year, month, day, hours, minutes, seconds, 0, 0, 0)
def list_ftp( f, filter_runners = None ):
# f is an ftp object
utils.log( "listing source content" )
lines = []
# 1. get all lines
f.dir( lambda x: lines.append( x ) )
# 2. split lines into words
word_lines = [ x.split( None, 8 ) for x in lines ]
if filter_runners != None:
word_lines = [ x for x in word_lines if re.match( filter_runners, x[-1], re.IGNORECASE ) ]
# we don't need directories
result = [ file_info( l[-1], int( l[4] ), get_date( f, l ) ) for l in word_lines if l[0][0] != "d" ]
for f in result:
utils.log( " %s" % f )
return result
def list_dir( dir ):
utils.log( "listing destination content %s" % dir )
result = []
for file_path in glob.glob( os.path.join( dir, "*.zip" ) ):
if os.path.isfile( file_path ):
mod_time = time.gmtime( os.path.getmtime( file_path ) )
mod_time = ( mod_time[0], mod_time[1], mod_time[2], mod_time[3], mod_time[4], mod_time[5], 0, 0, mod_time[8] )
size = os.path.getsize( file_path )
result.append( file_info( os.path.basename( file_path ), size, mod_time ) )
for fi in result:
utils.log( " %s" % fi )
return result
def find_by_name( d, name ):
for dd in d:
if dd.name == name:
return dd
return None
# Proof:
# gmtime(result) = time_tuple
# mktime(gmtime(result)) = mktime(time_tuple)
# correction = mktime(gmtime(result)) - result
# result = mktime(time_tuple) - correction
def mkgmtime(time_tuple):
# treat the tuple as if it were local time
local = time.mktime(time_tuple)
# calculate the correction to get gmtime
old_correction = 0
correction = time.mktime(time.gmtime(local)) - local
result = local
# iterate until the correction doesn't change
while correction != old_correction:
old_correction = correction
correction = time.mktime(time.gmtime(result)) - result
result = local - correction
return result
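# Illustrative sanity check (not part of the original script): mkgmtime should
# invert time.gmtime, up to rare edge cases around DST transitions, e.g.
#   now = int(time.time())
#   assert int(mkgmtime(time.gmtime(now))) == now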
def diff( source_dir_content, destination_dir_content ):
utils.log( "Finding updated files" )
result = ( [], [] ) # ( changed_files, obsolete_files )
for source_file in source_dir_content:
found = find_by_name( destination_dir_content, source_file.name )
if found is None: result[0].append( source_file.name )
elif time.mktime( found.date ) != time.mktime( source_file.date ) or \
found.size != source_file.size:
result[0].append( source_file.name )
else:
pass
for destination_file in destination_dir_content:
found = find_by_name( source_dir_content, destination_file.name )
if found is None: result[1].append( destination_file.name )
utils.log( " Updated files:" )
for f in result[0]:
utils.log( " %s" % f )
utils.log( " Obsolete files:" )
for f in result[1]:
utils.log( " %s" % f )
return result
def _modtime_timestamp( file ):
return os.stat( file ).st_mtime
root_paths = []
def shorten( file_path ):
root_paths.sort( lambda x, y: cmp( len(y ), len( x ) ) )
for root in root_paths:
if file_path.lower().startswith( root.lower() ):
return file_path[ len( root ): ].replace( "\\", "/" )
return file_path.replace( "\\", "/" )
class action:
def __init__( self, file_path ):
self.file_path_ = file_path
self.relevant_paths_ = [ self.file_path_ ]
self.boost_paths_ = []
self.dependencies_ = []
self.other_results_ = []
def run( self ):
utils.log( "%s: run" % shorten( self.file_path_ ) )
__log__ = 2
for dependency in self.dependencies_:
if not os.path.exists( dependency ):
utils.log( "%s doesn't exists, removing target" % shorten( dependency ) )
self.clean()
return
if not os.path.exists( self.file_path_ ):
utils.log( "target doesn't exists, building" )
self.update()
return
dst_timestamp = _modtime_timestamp( self.file_path_ )
utils.log( " target: %s [%s]" % ( shorten( self.file_path_ ), dst_timestamp ) )
needs_updating = 0
utils.log( " dependencies:" )
for dependency in self.dependencies_:
dm = _modtime_timestamp( dependency )
update_mark = ""
if dm > dst_timestamp:
needs_updating = 1
utils.log( ' %s [%s] %s' % ( shorten( dependency ), dm, update_mark ) )
if needs_updating:
utils.log( "target needs updating, rebuilding" )
self.update()
return
else:
utils.log( "target is up-to-date" )
def clean( self ):
to_unlink = self.other_results_ + [ self.file_path_ ]
for result in to_unlink:
utils.log( ' Deleting obsolete "%s"' % shorten( result ) )
if os.path.exists( result ):
os.unlink( result )
class merge_xml_action( action ):
def __init__( self, source, destination, expected_results_file, failures_markup_file, tag ):
action.__init__( self, destination )
self.source_ = source
self.destination_ = destination
self.tag_ = tag
self.expected_results_file_ = expected_results_file
self.failures_markup_file_ = failures_markup_file
self.dependencies_.extend( [
self.source_
, self.expected_results_file_
, self.failures_markup_file_
]
)
self.relevant_paths_.extend( [ self.source_ ] )
self.boost_paths_.extend( [ self.expected_results_file_, self.failures_markup_file_ ] )
def update( self ):
def filter_xml( src, dest ):
class xmlgen( xml.sax.saxutils.XMLGenerator ):
def __init__( self, writer ):
xml.sax.saxutils.XMLGenerator.__init__( self, writer )
self.trimmed = 0
self.character_content = ""
def startElement( self, name, attrs):
self.flush()
xml.sax.saxutils.XMLGenerator.startElement( self, name, attrs )
def endElement( self, name ):
self.flush()
xml.sax.saxutils.XMLGenerator.endElement( self, name )
def flush( self ):
content = self.character_content
self.character_content = ""
self.trimmed = 0
xml.sax.saxutils.XMLGenerator.characters( self, content )
def characters( self, content ):
if not self.trimmed:
max_size = pow( 2, 16 )
self.character_content += content
if len( self.character_content ) > max_size:
self.character_content = self.character_content[ : max_size ] + "...\n\n[The content has been trimmed by the report system because it exceeds %d bytes]" % max_size
self.trimmed = 1
o = open( dest, "w" )
try:
gen = xmlgen( o )
xml.sax.parse( src, gen )
finally:
o.close()
return dest
utils.log( 'Merging "%s" with expected results...' % shorten( self.source_ ) )
try:
trimmed_source = filter_xml( self.source_, '%s-trimmed.xml' % os.path.splitext( self.source_ )[0] )
utils.libxslt(
utils.log
, trimmed_source
, xsl_path( 'add_expected_results.xsl' )
, self.file_path_
, {
"expected_results_file" : self.expected_results_file_
, "failures_markup_file": self.failures_markup_file_
, "source" : self.tag_
}
)
os.unlink( trimmed_source )
except Exception, msg:
utils.log( ' Skipping "%s" due to errors (%s)' % ( self.source_, msg ) )
if os.path.exists( self.file_path_ ):
os.unlink( self.file_path_ )
def _xml_timestamp( xml_path ):
class timestamp_reader( xml.sax.handler.ContentHandler ):
def startElement( self, name, attrs ):
if name == 'test-run':
self.timestamp = attrs.getValue( 'timestamp' )
raise self
try:
xml.sax.parse( xml_path, timestamp_reader() )
raise 'Cannot extract timestamp from "%s". Invalid XML file format?' % xml_path
except timestamp_reader, x:
return x.timestamp
class make_links_action( action ):
def __init__( self, source, destination, output_dir, tag, run_date, comment_file, failures_markup_file ):
action.__init__( self, destination )
self.dependencies_.append( source )
self.source_ = source
self.output_dir_ = output_dir
self.tag_ = tag
self.run_date_ = run_date
self.comment_file_ = comment_file
self.failures_markup_file_ = failures_markup_file
self.links_file_path_ = os.path.join( output_dir, 'links.html' )
def update( self ):
utils.makedirs( os.path.join( os.path.dirname( self.links_file_path_ ), "output" ) )
utils.makedirs( os.path.join( os.path.dirname( self.links_file_path_ ), "developer", "output" ) )
utils.makedirs( os.path.join( os.path.dirname( self.links_file_path_ ), "user", "output" ) )
utils.log( ' Making test output files...' )
try:
utils.libxslt(
utils.log
, self.source_
, xsl_path( 'links_page.xsl' )
, self.links_file_path_
, {
'source': self.tag_
, 'run_date': self.run_date_
, 'comment_file': self.comment_file_
, 'explicit_markup_file': self.failures_markup_file_
}
)
except Exception, msg:
utils.log( ' Skipping "%s" due to errors (%s)' % ( self.source_, msg ) )
open( self.file_path_, "w" ).close()
class unzip_action( action ):
def __init__( self, source, destination, unzip_func ):
action.__init__( self, destination )
self.dependencies_.append( source )
self.source_ = source
self.unzip_func_ = unzip_func
def update( self ):
try:
utils.log( ' Unzipping "%s" ... into "%s"' % ( shorten( self.source_ ), os.path.dirname( self.file_path_ ) ) )
self.unzip_func_( self.source_, os.path.dirname( self.file_path_ ) )
except Exception, msg:
utils.log( ' Skipping "%s" due to errors (%s)' % ( self.source_, msg ) )
def ftp_task( site, site_path , destination, filter_runners = None ):
__log__ = 1
utils.log( '' )
utils.log( 'ftp_task: "ftp://%s/%s" -> %s' % ( site, site_path, destination ) )
utils.log( ' logging on ftp site %s' % site )
f = ftplib.FTP( site )
f.login()
utils.log( ' cwd to "%s"' % site_path )
f.cwd( site_path )
source_content = list_ftp( f, filter_runners )
source_content = [ x for x in source_content if re.match( r'.+[.](?<!log[.])zip', x.name ) and x.name.lower() != 'boostbook.zip' ]
destination_content = list_dir( destination )
d = diff( source_content, destination_content )
def synchronize():
for source in d[0]:
utils.log( 'Copying "%s"' % source )
result = open( os.path.join( destination, source ), 'wb' )
f.retrbinary( 'RETR %s' % source, result.write )
result.close()
mod_date = find_by_name( source_content, source ).date
m = mkgmtime( mod_date )
os.utime( os.path.join( destination, source ), ( m, m ) )
for obsolete in d[1]:
utils.log( 'Deleting "%s"' % obsolete )
os.unlink( os.path.join( destination, obsolete ) )
utils.log( " Synchronizing..." )
__log__ = 2
synchronize()
f.quit()
def unzip_archives_task( source_dir, processed_dir, unzip_func ):
utils.log( '' )
utils.log( 'unzip_archives_task: unpacking updated archives in "%s" into "%s"...' % ( source_dir, processed_dir ) )
__log__ = 1
target_files = [ os.path.join( processed_dir, os.path.basename( x.replace( ".zip", ".xml" ) ) ) for x in glob.glob( os.path.join( source_dir, "*.zip" ) ) ] + glob.glob( os.path.join( processed_dir, "*.xml" ) )
actions = [ unzip_action( os.path.join( source_dir, os.path.basename( x.replace( ".xml", ".zip" ) ) ), x, unzip_func ) for x in target_files ]
for a in actions:
a.run()
def merge_xmls_task( source_dir, processed_dir, merged_dir, expected_results_file, failures_markup_file, tag ):
utils.log( '' )
utils.log( 'merge_xmls_task: merging updated XMLs in "%s"...' % source_dir )
__log__ = 1
utils.makedirs( merged_dir )
target_files = [ os.path.join( merged_dir, os.path.basename( x ) ) for x in glob.glob( os.path.join( processed_dir, "*.xml" ) ) ] + glob.glob( os.path.join( merged_dir, "*.xml" ) )
actions = [ merge_xml_action( os.path.join( processed_dir, os.path.basename( x ) )
, x
, expected_results_file
, failures_markup_file
, tag ) for x in target_files ]
for a in actions:
a.run()
def make_links_task( input_dir, output_dir, tag, run_date, comment_file, extended_test_results, failures_markup_file ):
utils.log( '' )
utils.log( 'make_links_task: make output files for test results in "%s"...' % input_dir )
__log__ = 1
target_files = [ x + ".links" for x in glob.glob( os.path.join( input_dir, "*.xml" ) ) ] + glob.glob( os.path.join( input_dir, "*.links" ) )
actions = [ make_links_action( x.replace( ".links", "" )
, x
, output_dir
, tag
, run_date
, comment_file
, failures_markup_file
) for x in target_files ]
for a in actions:
a.run()
class xmlgen( xml.sax.saxutils.XMLGenerator ):
document_started = 0
def startDocument( self ):
if not self.document_started:
xml.sax.saxutils.XMLGenerator.startDocument( self )
self.document_started = 1
def merge_processed_test_runs( test_runs_dir, tag, writer ):
utils.log( '' )
utils.log( 'merge_processed_test_runs: merging processed test runs from %s into a single XML...' % test_runs_dir )
__log__ = 1
all_runs_xml = xmlgen( writer, encoding='utf-8' )
all_runs_xml.startDocument()
all_runs_xml.startElement( 'all-test-runs', {} )
files = glob.glob( os.path.join( test_runs_dir, '*.xml' ) )
for test_run in files:
#file_pos = writer.stream.tell()
file_pos = writer.tell()
try:
utils.log( ' Writing "%s" into the resulting XML...' % test_run )
xml.sax.parse( test_run, all_runs_xml )
except Exception, msg:
utils.log( ' Skipping "%s" due to errors (%s)' % ( test_run, msg ) )
#writer.stream.seek( file_pos )
#writer.stream.truncate()
writer.seek( file_pos )
writer.truncate()
all_runs_xml.endElement( 'all-test-runs' )
all_runs_xml.endDocument()
def execute_tasks(
tag
, user
, run_date
, comment_file
, results_dir
, output_dir
, reports
, warnings
, extended_test_results
, dont_collect_logs
, expected_results_file
, failures_markup_file
, report_executable
, filter_runners
):
incoming_dir = os.path.join( results_dir, 'incoming', tag )
processed_dir = os.path.join( incoming_dir, 'processed' )
merged_dir = os.path.join( processed_dir, 'merged' )
if not os.path.exists( incoming_dir ):
os.makedirs( incoming_dir )
if not os.path.exists( processed_dir ):
os.makedirs( processed_dir )
if not os.path.exists( merged_dir ):
os.makedirs( merged_dir )
if not dont_collect_logs:
ftp_site = 'boost.cowic.de'
site_path = '/boost/do-not-publish-this-url/results/%s' % tag
ftp_task( ftp_site, site_path, incoming_dir, filter_runners )
unzip_archives_task( incoming_dir, processed_dir, utils.unzip )
if report_executable:
if not os.path.exists( merged_dir ):
os.makedirs( merged_dir )
command_line = report_executable
command_line += " --expected " + '"%s"' % expected_results_file
command_line += " --markup " + '"%s"' % failures_markup_file
command_line += " --comment " + '"%s"' % comment_file
command_line += " --tag " + tag
# command_line += " --run-date " + '"%s"' % run_date
command_line += " -rl"
for r in reports:
command_line += ' -r' + r
command_line += " --css " + xsl_path( 'html/master.css' )
for f in glob.glob( os.path.join( processed_dir, '*.xml' ) ):
command_line += ' "%s"' % f
utils.log("Producing the reports...")
utils.log("> "+command_line)
os.system(command_line)
return
merge_xmls_task( incoming_dir, processed_dir, merged_dir, expected_results_file, failures_markup_file, tag )
make_links_task( merged_dir
, output_dir
, tag
, run_date
, comment_file
, extended_test_results
, failures_markup_file )
results_xml_path = os.path.join( output_dir, 'extended_test_results.xml' )
#writer = codecs.open( results_xml_path, 'w', 'utf-8' )
writer = open( results_xml_path, 'w' )
merge_processed_test_runs( merged_dir, tag, writer )
writer.close()
make_result_pages(
extended_test_results
, expected_results_file
, failures_markup_file
, tag
, run_date
, comment_file
, output_dir
, reports
, warnings
)
def make_result_pages(
extended_test_results
, expected_results_file
, failures_markup_file
, tag
, run_date
, comment_file
, output_dir
, reports
, warnings
):
utils.log( 'Producing the reports...' )
__log__ = 1
warnings_text = '+'.join( warnings )
if comment_file != '':
comment_file = os.path.abspath( comment_file )
links = os.path.join( output_dir, 'links.html' )
utils.makedirs( os.path.join( output_dir, 'output' ) )
for mode in ( 'developer', 'user' ):
utils.makedirs( os.path.join( output_dir, mode , 'output' ) )
issues = os.path.join( output_dir, 'developer', 'issues.html' )
if 'i' in reports:
utils.log( ' Making issues list...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'issues_page.xsl' )
, issues
, {
'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'expected_results_file': expected_results_file
, 'explicit_markup_file': failures_markup_file
, 'release': "yes"
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 'd' in reports:
utils.log( ' Making detailed %s report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'result_page.xsl' )
, os.path.join( output_dir, mode, 'index.html' )
, {
'links_file': 'links.html'
, 'mode': mode
, 'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'expected_results_file': expected_results_file
, 'explicit_markup_file' : failures_markup_file
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 's' in reports:
utils.log( ' Making summary %s report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'summary_page.xsl' )
, os.path.join( output_dir, mode, 'summary.html' )
, {
'mode' : mode
, 'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'explicit_markup_file' : failures_markup_file
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 'dr' in reports:
utils.log( ' Making detailed %s release report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'result_page.xsl' )
, os.path.join( output_dir, mode, 'index_release.html' )
, {
'links_file': 'links.html'
, 'mode': mode
, 'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'expected_results_file': expected_results_file
, 'explicit_markup_file' : failures_markup_file
, 'release': "yes"
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 'sr' in reports:
utils.log( ' Making summary %s release report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'summary_page.xsl' )
, os.path.join( output_dir, mode, 'summary_release.html' )
, {
'mode' : mode
, 'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'explicit_markup_file' : failures_markup_file
, 'release': 'yes'
}
)
if 'e' in reports:
utils.log( ' Generating expected_results ...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'produce_expected_results.xsl' )
, os.path.join( output_dir, 'expected_results.xml' )
)
if 'n' in reports:
utils.log( ' Making runner comment files...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'runners.xsl' )
, os.path.join( output_dir, 'runners.html' )
)
shutil.copyfile(
xsl_path( 'html/master.css' )
, os.path.join( output_dir, 'master.css' )
)
fix_file_names( output_dir )
def fix_file_names( dir ):
"""
The current version of xsltproc doesn't correctly handle
spaces. We have to manually go through the
result set and decode encoded spaces (%20).
"""
utils.log( 'Fixing encoded file names...' )
for root, dirs, files in os.walk( dir ):
for file in files:
if file.find( "%20" ) > -1:
new_name = file.replace( "%20", " " )
utils.rename(
utils.log
, os.path.join( root, file )
, os.path.join( root, new_name )
)
def build_xsl_reports(
locate_root_dir
, tag
, expected_results_file
, failures_markup_file
, comment_file
, results_dir
, result_file_prefix
, dont_collect_logs = 0
, reports = report_types
, report_executable = None
, warnings = []
, user = None
, upload = False
, filter_runners = None
):
( run_date ) = time.strftime( '%Y-%m-%dT%H:%M:%SZ', time.gmtime() )
root_paths.append( locate_root_dir )
root_paths.append( results_dir )
bin_boost_dir = os.path.join( locate_root_dir, 'bin', 'boost' )
output_dir = os.path.join( results_dir, result_file_prefix )
utils.makedirs( output_dir )
if expected_results_file != '':
expected_results_file = os.path.abspath( expected_results_file )
else:
expected_results_file = os.path.abspath( map_path( 'empty_expected_results.xml' ) )
extended_test_results = os.path.join( output_dir, 'extended_test_results.xml' )
execute_tasks(
tag
, user
, run_date
, comment_file
, results_dir
, output_dir
, reports
, warnings
, extended_test_results
, dont_collect_logs
, expected_results_file
, failures_markup_file
, report_executable
, filter_runners
)
if upload:
upload_dir = 'regression-logs/'
utils.log( 'Uploading results into "%s" [connecting as %s]...' % ( upload_dir, user ) )
archive_name = '%s.tar.gz' % result_file_prefix
utils.tar(
os.path.join( results_dir, result_file_prefix )
, archive_name
)
utils.sourceforge.upload( os.path.join( results_dir, archive_name ), upload_dir, user )
utils.sourceforge.untar( os.path.join( upload_dir, archive_name ), user, background = True )
def accept_args( args ):
args_spec = [
'locate-root='
, 'tag='
, 'expected-results='
, 'failures-markup='
, 'comment='
, 'results-dir='
, 'results-prefix='
, 'dont-collect-logs'
, 'reports='
, 'boost-report='
, 'user='
, 'upload'
, 'help'
, 'filter-runners='
]
options = {
'--comment': ''
, '--expected-results': ''
, '--failures-markup': ''
, '--reports': string.join( report_types, ',' )
, '--boost-report': None
, '--tag': None
, '--user': None
, 'upload': False
, '--filter-runners': None
}
utils.accept_args( args_spec, args, options, usage )
if not options.has_key( '--results-dir' ):
options[ '--results-dir' ] = options[ '--locate-root' ]
if not options.has_key( '--results-prefix' ):
options[ '--results-prefix' ] = 'all'
warnings = []
return (
options[ '--locate-root' ]
, options[ '--tag' ]
, options[ '--expected-results' ]
, options[ '--failures-markup' ]
, options[ '--comment' ]
, options[ '--results-dir' ]
, options[ '--results-prefix' ]
, options.has_key( '--dont-collect-logs' )
, options[ '--reports' ].split( ',' )
, options[ '--boost-report' ]
, warnings
, options[ '--user' ]
, options.has_key( '--upload' )
, options[ '--filter-runners' ]
)
def usage():
print 'Usage: %s [options]' % os.path.basename( sys.argv[0] )
print '''
\t--locate-root the same as --locate-root in compiler_status
\t--tag the tag for the results (i.e. 'trunk')
\t--expected-results the file with the results to be compared with
\t the current run
\t--failures-markup the file with the failures markup
\t--comment an html comment file (will be inserted in the reports)
\t--results-dir the directory containing -links.html, -fail.html
\t files produced by compiler_status (by default the
\t same as specified in --locate-root)
\t--results-prefix the prefix of -links.html, -fail.html
\t files produced by compiler_status
\t--user SourceForge user name for a shell account
\t--upload upload reports to SourceForge
The following options are useful in debugging:
\t--dont-collect-logs don't collect the test logs
\t--reports produce only the specified reports
\t us - user summary
\t ds - developer summary
\t ud - user detailed
\t dd - developer detailed
\t l - links
\t p - patches
\t x - extended results file
\t i - issues
\t n - runner comment files
\t--filter-runners use only those runners that match specified
\t regex (case insensitive)
'''
def main():
build_xsl_reports( *accept_args( sys.argv[ 1 : ] ) )
if __name__ == '__main__':
main()


@@ -1,179 +0,0 @@
import ftplib
import optparse
import os
import time
import urlparse
import utils
import shutil
import sys
import zipfile
import xml.sax.saxutils
import utils.libxslt
def get_date( words ):
date = words[ 5: -1 ]
t = time.localtime()
month_names = [ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ]
year = time.localtime()[0] # If the year is not specified, it is the current year
month = month_names.index( date[0] ) + 1
day = int( date[1] )
hours = 0
minutes = 0
if date[2].find( ":" ) != -1:
( hours, minutes ) = [ int(x) for x in date[2].split( ":" ) ]
else:
# there is no time of day at all for dates outside the current year
year = int( date[2] )
return ( year, month, day, hours, minutes, 0, 0, 0, 0 )
#def check_for_new_upload( target_dir, boostbook_info ):
def accept_args( args ):
parser = optparse.OptionParser()
parser.add_option( '-t', '--tag', dest='tag', help="the tag for the results (i.e. 'RC_1_34_0')" )
parser.add_option( '-d', '--destination', dest='destination', help='destination directory' )
if len(args) == 0:
parser.print_help()
sys.exit( 1 )
(options, args) = parser.parse_args()
if not options.destination:
print '-d is required'
parser.print_help()
sys.exit( 1 )
return options
def unzip( archive_path, result_dir ):
utils.log( 'Unpacking %s into %s' % ( archive_path, result_dir ) )
z = zipfile.ZipFile( archive_path, 'r', zipfile.ZIP_DEFLATED )
for f in z.infolist():
dir = os.path.join( result_dir, os.path.dirname( f.filename ) )
if not os.path.exists( dir ):
os.makedirs( dir )
result = open( os.path.join( result_dir, f.filename ), 'wb' )
result.write( z.read( f.filename ) )
result.close()
z.close()
def boostbook_report( options ):
site = 'fx.meta-comm.com'
site_path = '/boost-regression/%s' % options.tag
utils.log( 'Opening %s ...' % site )
f = ftplib.FTP( site )
f.login()
utils.log( ' cd %s ...' % site_path )
f.cwd( site_path )
utils.log( ' dir' )
lines = []
f.dir( lambda x: lines.append( x ) )
word_lines = [ x.split( None, 8 ) for x in lines ]
boostbook_info = [ ( l[-1], get_date( l ) ) for l in word_lines if l[-1] == "BoostBook.zip" ]
if len( boostbook_info ) > 0:
boostbook_info = boostbook_info[0]
utils.log( 'BoostBook found! (%s)' % ( boostbook_info, ) )
local_copy = os.path.join( options.destination,'BoostBook-%s.zip' % options.tag )
if 1:
if os.path.exists( local_copy ):
utils.log( 'Local copy exists. Checking if it is older than uploaded one...' )
uploaded_mtime = time.mktime( boostbook_info[1] )
local_mtime = os.path.getmtime( local_copy )
utils.log( ' uploaded: %s %s, local: %s %s' %
( uploaded_mtime
, boostbook_info[1]
, local_mtime
, time.localtime( local_mtime )) )
modtime = time.localtime( os.path.getmtime( local_copy ) )
if uploaded_mtime <= local_mtime:
utils.log( 'Local copy is newer: exiting' )
sys.exit()
if 1:
temp = os.path.join( options.destination,'BoostBook.zip' )
result = open( temp, 'wb' )
f.retrbinary( 'RETR %s' % boostbook_info[0], result.write )
result.close()
if os.path.exists( local_copy ):
os.unlink( local_copy )
os.rename( temp, local_copy )
m = time.mktime( boostbook_info[1] )
os.utime( local_copy, ( m, m ) )
docs_name = os.path.splitext( os.path.basename( local_copy ) )[0]
if 1:
unpacked_docs_dir = os.path.join( options.destination, docs_name )
utils.log( 'Dir %s ' % unpacked_docs_dir )
if os.path.exists( unpacked_docs_dir ):
utils.log( 'Cleaning up...' )
shutil.rmtree( unpacked_docs_dir )
os.makedirs( unpacked_docs_dir )
unzip( local_copy, unpacked_docs_dir )
utils.system( [ 'cd %s' % unpacked_docs_dir
, 'tar -c -f ../%s.tar.gz -z --exclude=tarball *' % docs_name ] )
process_boostbook_build_log( os.path.join( unpacked_docs_dir, 'boostbook.log' ), read_timestamp( unpacked_docs_dir ) )
utils.libxslt( log
, os.path.abspath( os.path.join( unpacked_docs_dir, 'boostbook.log.xml' ) )
, os.path.abspath( os.path.join( os.path.dirname( __file__ ), 'xsl', 'v2', 'boostbook_log.xsl' ) )
, os.path.abspath( os.path.join( unpacked_docs_dir, 'boostbook.log.html' ) ) )
def log( msg ):
print msg
def process_boostbook_build_log( path, timestamp ):
f = open( path + '.xml', 'w' )
g = xml.sax.saxutils.XMLGenerator( f )
lines = open( path ).read().splitlines()
output_lines = []
result = 'success'
for line in lines:
type = 'output'
if line.startswith( '...failed' ):
type = 'failure'
result='failure'
if line.startswith( 'runtime error:' ):
type = 'failure'
if line.startswith( '...skipped' ):
type = 'skipped'
output_lines.append( ( type, line ) )
g.startDocument()
g.startElement( 'build', { 'result': result, 'timestamp': timestamp } )
for line in output_lines:
g.startElement( 'line', { 'type': line[0]} )
g.characters( line[1] )
g.endElement( 'line' )
g.endElement( 'build' )
g.endDocument()
def read_timestamp( docs_directory ):
f = open( os.path.join( docs_directory, 'timestamp' ) )
try:
return f.readline()
finally:
f.close()
def main():
options = accept_args( sys.argv[1:])
boostbook_report( options )
if __name__ == '__main__':
main()


@@ -1,135 +0,0 @@
#!/bin/sh
#~ Copyright Redshift Software, Inc. 2007-2008
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
set -e
build_all()
{
update_tools ${1} ${2}
build_results ${1} ${2}
upload_results ${1} ${2}
}
update_tools()
{
cwd=`pwd`
cd boost
git pull
cd "${cwd}"
}
report_info()
{
cat - > comment.html <<HTML
<table style="border-spacing: 0.5em;">
<tr>
<td style="vertical-align: top;"><tt>uname</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`uname -a`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>uptime</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`uptime`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>vmstat</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`vmstat`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>xsltproc</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`xsltproc --version`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>python</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`python --version 2>&1`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;">previous run</td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`cat previous.txt`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;">current run</td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`date -u`
</pre>
</td>
</tr>
</table>
HTML
date -u > previous.txt
}
build_results()
{
tag=${1?'error: command line missing branch-name argument'}
reports="dd,ds,i,n"
cwd=`pwd`
cd ${1}
root=`pwd`
boost=${cwd}/boost
if [ -x ${cwd}/boost_report ]; then
report_opt=--boost-report=${cwd}/boost_report
fi
report_info
python "${boost}/tools/regression/xsl_reports/boost_wide_report.py" \
--locate-root="${root}" \
--tag=${tag} \
--expected-results="${boost}/status/expected_results.xml" \
--failures-markup="${boost}/status/explicit-failures-markup.xml" \
--comment="comment.html" \
--user="" \
--reports=${reports} \
${report_opt}
cd "${cwd}"
}
upload_results()
{
cwd=`pwd`
upload_dir=/home/grafik/www.boost.org/testing
if [ -f ${1}/report.zip ]; then
mv ${1}/report.zip ${1}.zip
else
cd ${1}/all
rm -f ../../${1}.zip*
#~ zip -q -r -9 ../../${1} * -x '*.xml'
7za a -tzip -mx=9 ../../${1}.zip * '-x!*.xml'
cd "${cwd}"
fi
mv ${1}.zip ${1}.zip.uploading
rsync -vuz --rsh=ssh --stats \
${1}.zip.uploading grafik@beta.boost.org:/${upload_dir}/incoming/
ssh grafik@beta.boost.org \
cp --no-preserve=timestamps ${upload_dir}/incoming/${1}.zip.uploading ${upload_dir}/live/${1}.zip
mv ${1}.zip.uploading ${1}.zip
}
build_all ${1} ${2}


@@ -1,226 +0,0 @@
#!/bin/sh
#~ Copyright Rene Rivera 2014-2015
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
set -e
log_time()
{
echo `date` "::" $1 >> boost-reports-time.log
}
build_all()
{
log_time "Start of testing. [build_all]"
build_setup
update_tools
build_results develop 2>&1 | tee boost-reports/develop.log
build_results master 2>&1 | tee boost-reports/master.log
upload_results develop
upload_results master
log_time "End of testing. [build_all]"
}
git_update()
{
cwd=`pwd`
if [ -d "${1}" ]; then
cd "${1}"
git remote set-branches --add origin "${2}"
git pull --recurse-submodules
git checkout "${2}"
else
mkdir -p "${1}"
git init "${1}"
cd "${1}"
git remote add --no-tags -t "${2}" origin "${3}"
git fetch --depth=1
git checkout "${2}"
fi
cd "${cwd}"
}
git_submodule_update()
{
cwd=`pwd`
cd "${1}"
git submodule update --init "${2}"
cd "${cwd}"
}
build_setup()
{
log_time "Get tools. [build_setup]"
cwd=`pwd`
mkdir -p boost-reports/develop
mkdir -p boost-reports/master
log_time "Git; boost_root [build_setup]"
git_update "${cwd}/boost-reports/boost_root" master 'https://github.com/boostorg/boost.git'
git_submodule_update "${cwd}/boost-reports/boost_root" libs/algorithm
git_submodule_update "${cwd}/boost-reports/boost_root" libs/any
git_submodule_update "${cwd}/boost-reports/boost_root" libs/array
git_submodule_update "${cwd}/boost-reports/boost_root" libs/assert
git_submodule_update "${cwd}/boost-reports/boost_root" libs/bind
git_submodule_update "${cwd}/boost-reports/boost_root" libs/concept_check
git_submodule_update "${cwd}/boost-reports/boost_root" libs/config
git_submodule_update "${cwd}/boost-reports/boost_root" libs/container
git_submodule_update "${cwd}/boost-reports/boost_root" libs/core
git_submodule_update "${cwd}/boost-reports/boost_root" libs/crc
git_submodule_update "${cwd}/boost-reports/boost_root" libs/date_time
git_submodule_update "${cwd}/boost-reports/boost_root" libs/detail
git_submodule_update "${cwd}/boost-reports/boost_root" libs/exception
git_submodule_update "${cwd}/boost-reports/boost_root" libs/filesystem
git_submodule_update "${cwd}/boost-reports/boost_root" libs/foreach
git_submodule_update "${cwd}/boost-reports/boost_root" libs/format
git_submodule_update "${cwd}/boost-reports/boost_root" libs/function
git_submodule_update "${cwd}/boost-reports/boost_root" libs/functional
git_submodule_update "${cwd}/boost-reports/boost_root" libs/integer
git_submodule_update "${cwd}/boost-reports/boost_root" libs/io
git_submodule_update "${cwd}/boost-reports/boost_root" libs/iostreams
git_submodule_update "${cwd}/boost-reports/boost_root" libs/iterator
git_submodule_update "${cwd}/boost-reports/boost_root" libs/lexical_cast
git_submodule_update "${cwd}/boost-reports/boost_root" libs/math
git_submodule_update "${cwd}/boost-reports/boost_root" libs/move
git_submodule_update "${cwd}/boost-reports/boost_root" libs/mpl
git_submodule_update "${cwd}/boost-reports/boost_root" libs/numeric/conversion
git_submodule_update "${cwd}/boost-reports/boost_root" libs/optional
git_submodule_update "${cwd}/boost-reports/boost_root" libs/predef
git_submodule_update "${cwd}/boost-reports/boost_root" libs/preprocessor
git_submodule_update "${cwd}/boost-reports/boost_root" libs/property_tree
git_submodule_update "${cwd}/boost-reports/boost_root" libs/program_options
git_submodule_update "${cwd}/boost-reports/boost_root" libs/range
git_submodule_update "${cwd}/boost-reports/boost_root" libs/regex
git_submodule_update "${cwd}/boost-reports/boost_root" libs/smart_ptr
git_submodule_update "${cwd}/boost-reports/boost_root" libs/static_assert
git_submodule_update "${cwd}/boost-reports/boost_root" libs/system
git_submodule_update "${cwd}/boost-reports/boost_root" libs/throw_exception
git_submodule_update "${cwd}/boost-reports/boost_root" libs/tokenizer
git_submodule_update "${cwd}/boost-reports/boost_root" libs/tuple
git_submodule_update "${cwd}/boost-reports/boost_root" libs/type_index
git_submodule_update "${cwd}/boost-reports/boost_root" libs/type_traits
git_submodule_update "${cwd}/boost-reports/boost_root" libs/unordered
git_submodule_update "${cwd}/boost-reports/boost_root" libs/utility
git_submodule_update "${cwd}/boost-reports/boost_root" libs/variant
git_submodule_update "${cwd}/boost-reports/boost_root" libs/wave
git_submodule_update "${cwd}/boost-reports/boost_root" tools/inspect
log_time "Git; boost_regression [build_setup]"
git_update "${cwd}/boost-reports/boost_regression" develop 'https://github.com/boostorg/regression.git'
log_time "Git; boost_bb [build_setup]"
git_update "${cwd}/boost-reports/boost_bb" develop 'https://github.com/boostorg/build.git'
cd "${cwd}"
}
update_tools()
{
log_time "Build tools. [update_tools]"
cwd=`pwd`
cd "${cwd}/boost-reports/boost_bb"
./bootstrap.sh
cd "${cwd}/boost-reports/boost_regression/build"
"${cwd}/boost-reports/boost_bb/b2" \
"--boost-build=${cwd}/boost-reports/boost_bb/src" \
"--boost-root=${cwd}/boost-reports/boost_root" bin_boost_report
cd "${cwd}"
}
report_info()
{
cat - > comment.html <<HTML
<table style="border-spacing: 0.5em;">
<tr>
<td style="vertical-align: top;"><tt>uname</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`uname -a`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>uptime</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`uptime`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>python</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`python --version 2>&1`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;">previous run</td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`cat previous.txt`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;">current run</td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`date -u`
</pre>
</td>
</tr>
</table>
HTML
date -u > previous.txt
}
build_results()
{
tag="${1?'error: command line missing branch-name argument'}"
log_time "Build results for branch ${tag}. [build_results]"
reports="dd,ds,i,n"
cwd=`pwd`
cd boost-reports
cd "${1}"
root=`pwd`
boost=${cwd}/boost-reports/boost_root
report_info
python "${cwd}/boost-reports/boost_regression/xsl_reports/boost_wide_report.py" \
--locate-root="${root}" \
--tag=${tag} \
--expected-results="${boost}/status/expected_results.xml" \
--failures-markup="${boost}/status/explicit-failures-markup.xml" \
--comment="comment.html" \
--user="" \
--reports=${reports} \
"--boost-report=${cwd}/boost-reports/boost_regression/build/bin_boost_report/boost_report"
cd "${cwd}"
}
upload_results()
{
log_time "Upload results for branch $1. [upload_results]"
cwd=`pwd`
cd boost-reports
upload_dir=/home/grafik/www.boost.org/testing
if [ -f ${1}/report.zip ]; then
mv ${1}/report.zip ${1}.zip
else
cd ${1}/all
rm -f ../../${1}.zip*
#~ zip -q -r -9 ../../${1} * -x '*.xml'
7za a -tzip -mx=9 ../../${1}.zip * '-x!*.xml'
cd "${cwd}"
fi
mv ${1}.zip ${1}.zip.uploading
rsync -vuz --rsh=ssh --stats \
${1}.zip.uploading grafik@beta.boost.org:/${upload_dir}/incoming/
ssh grafik@beta.boost.org \
cp --no-preserve=timestamps ${upload_dir}/incoming/${1}.zip.uploading ${upload_dir}/live/${1}.zip
mv ${1}.zip.uploading ${1}.zip
cd "${cwd}"
}
echo "=====-----=====-----=====-----=====-----=====-----=====-----=====-----" >> boost-reports-time.log
build_all 2>&1 | tee boost-reports.log


@@ -1,840 +0,0 @@
#
# Copyright (C) 2005, 2007 The Trustees of Indiana University
# Author: Douglas Gregor
#
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#
import re
import smtplib
import os
import time
import string
import datetime
import sys
report_author = "Douglas Gregor <dgregor@osl.iu.edu>"
boost_dev_list = "Boost Developer List <boost@lists.boost.org>"
boost_testing_list = "Boost Testing List <boost-testing@lists.boost.org>"
def sorted_keys( dict ):
result = dict.keys()
result.sort()
return result
class Platform:
"""
All of the failures for a particular platform.
"""
def __init__(self, name):
self.name = name
self.failures = list()
self.maintainers = list()
return
def addFailure(self, failure):
self.failures.append(failure)
return
def isBroken(self):
return len(self.failures) > 300
def addMaintainer(self, maintainer):
"""
Add a new maintainer for this platform.
"""
self.maintainers.append(maintainer)
return
class Failure:
"""
A single test case failure in the report.
"""
def __init__(self, test, platform):
self.test = test
self.platform = platform
return
class Test:
"""
All of the failures for a single test name within a library.
"""
def __init__(self, library, name):
self.library = library
self.name = name
self.failures = list()
return
def addFailure(self, failure):
self.failures.append(failure)
return
def numFailures(self):
return len(self.failures)
def numReportableFailures(self):
"""
Returns the number of failures that we will report to the
maintainers of the library. This doesn't count failures on
broken platforms.
"""
count = 0
for failure in self.failures:
if not failure.platform.isBroken():
count += 1
pass
pass
return count
class Library:
"""
All of the information about the failures in a single library.
"""
def __init__(self, name):
self.name = name
self.maintainers = list()
self.tests = list()
return
def addTest(self, test):
"""
Add another test to the library.
"""
self.tests.append(test)
return
def addMaintainer(self, maintainer):
"""
Add a new maintainer for this library.
"""
self.maintainers.append(maintainer)
return
def numFailures(self):
count = 0
for test in self.tests:
count += test.numFailures()
pass
return count
def numReportableFailures(self):
count = 0
for test in self.tests:
count += test.numReportableFailures()
pass
return count
class Maintainer:
"""
Information about the maintainer of a library
"""
def __init__(self, name, email):
self.name = name
self.email = email
self.libraries = list()
return
def addLibrary(self, library):
self.libraries.append(library)
return
def composeEmail(self, report):
"""
Composes an e-mail to this maintainer with information about
the failures in his or her libraries, omitting those that come
from "broken" platforms. Returns the e-mail text if a message
needs to be sent, or None otherwise.
"""
# Determine if we need to send a message to this developer.
requires_message = False
for library in self.libraries:
if library.numReportableFailures() > 0:
requires_message = True
break
if not requires_message:
return None
# Build the message header
message = """From: Douglas Gregor <dgregor@osl.iu.edu>
To: """
message += self.name + ' <' + self.email + '>'
message += """
Reply-To: boost@lists.boost.org
Subject: Failures in your Boost libraries as of """
message += str(datetime.date.today()) + " [" + report.branch + "]"
message += """
You are receiving this report because one or more of the libraries you
maintain has regression test failures that are not accounted for.
A full version of the report is sent to the Boost developer's mailing
list.
Detailed report:
"""
message += ' ' + report.url + """
There are failures in these libraries you maintain:
"""
# List the libraries this maintainer is responsible for and
# the number of reportable failures in that library.
for library in self.libraries:
num_failures = library.numReportableFailures()
if num_failures > 0:
message += ' ' + library.name + ' (' + str(num_failures) + ')\n'
pass
pass
# Provide the details for the failures in each library.
for library in self.libraries:
if library.numReportableFailures() > 0:
message += '\n|' + library.name + '|\n'
for test in library.tests:
if test.numReportableFailures() > 0:
message += ' ' + test.name + ':'
for failure in test.failures:
if not failure.platform.isBroken():
message += ' ' + failure.platform.name
pass
pass
message += '\n'
pass
pass
pass
pass
return message
class PlatformMaintainer:
"""
Information about the platform maintainer of a library
"""
def __init__(self, name, email):
self.name = name
self.email = email
self.platforms = list()
return
def addPlatform(self, runner, platform):
self.platforms.append(platform)
return
def composeEmail(self, report):
"""
Composes an e-mail to this platform maintainer if one or more of
the platforms s/he maintains has a large number of failures.
Returns the e-mail text if a message needs to be sent, or None
otherwise.
"""
# Determine if we need to send a message to this developer.
requires_message = False
for platform in self.platforms:
if platform.isBroken():
requires_message = True
break
if not requires_message:
return None
# Build the message header
message = """From: Douglas Gregor <dgregor@osl.iu.edu>
To: """
message += self.name + ' <' + self.email + '>'
message += """
Reply-To: boost@lists.boost.org
Subject: Large number of Boost failures on a platform you maintain as of """
message += str(datetime.date.today()) + " [" + report.branch + "]"
message += """
You are receiving this report because one or more of the testing
platforms that you maintain has a large number of Boost failures that
are not accounted for. A full version of the report is sent to the
Boost developer's mailing list.
Detailed report:
"""
message += ' ' + report.url + """
The following platforms have a large number of failures:
"""
for platform in self.platforms:
if platform.isBroken():
message += (' ' + platform.name + ' ('
+ str(len(platform.failures)) + ' failures)\n')
return message
class Report:
"""
The complete report of all failing test cases.
"""
def __init__(self, branch = 'trunk'):
self.branch = branch
self.date = None
self.url = None
self.libraries = dict()
self.platforms = dict()
self.maintainers = dict()
self.platform_maintainers = dict()
return
def getPlatform(self, name):
"""
Retrieve the platform with the given name.
"""
if self.platforms.has_key(name):
return self.platforms[name]
else:
self.platforms[name] = Platform(name)
return self.platforms[name]
def getMaintainer(self, name, email):
"""
Retrieve the maintainer with the given name and e-mail address.
"""
if self.maintainers.has_key(name):
return self.maintainers[name]
else:
self.maintainers[name] = Maintainer(name, email)
return self.maintainers[name]
def getPlatformMaintainer(self, name, email):
"""
Retrieve the platform maintainer with the given name and
e-mail address.
"""
if self.platform_maintainers.has_key(name):
return self.platform_maintainers[name]
else:
self.platform_maintainers[name] = PlatformMaintainer(name, email)
return self.platform_maintainers[name]
def parseIssuesEmail(self):
"""
Try to parse the issues e-mail file. Returns True if everything was
successful, False otherwise.
"""
# See if we actually got the file
if not os.path.isfile('issues-email.txt'):
return False
# Determine the set of libraries that have unresolved failures
date_regex = re.compile('Report time: (.*)')
url_regex = re.compile(' (http://.*)')
library_regex = re.compile('\|(.*)\|')
failure_regex = re.compile(' ([^:]*): (.*)')
current_library = None
for line in file('issues-email.txt', 'r'):
# Check for the report time line
m = date_regex.match(line)
if m:
self.date = m.group(1)
continue
# Check for the detailed report URL
m = url_regex.match(line)
if m:
self.url = m.group(1)
continue
# Check for a library header
m = library_regex.match(line)
if m:
current_library = Library(m.group(1))
self.libraries[m.group(1)] = current_library
continue
# Check for a library test and its failures
m = failure_regex.match(line)
if m:
test = Test(current_library, m.group(1))
for platform_name in re.split('\s*', m.group(2)):
if platform_name != '':
platform = self.getPlatform(platform_name)
failure = Failure(test, platform)
test.addFailure(failure)
platform.addFailure(failure)
pass
current_library.addTest(test)
continue
pass
return True
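# For reference, the issues-email.txt fragments that the regular expressions
# above are written against look roughly like this (library, test and
# platform names invented for illustration):
#
#   Report time: <timestamp>
#    http://beta.boost.org/development/tests/trunk/developer/...
#
#   |some_library|
#    some_test:  toolset-a  toolset-b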
def getIssuesEmail(self):
"""
Retrieve the issues email from beta.boost.org, trying a few
times in case something wonky is happening. If we can retrieve
the file, calls parseIssuesEmail and return True; otherwise,
return False.
"""
base_url = "http://beta.boost.org/development/tests/"
base_url += self.branch
base_url += "/developer/";
got_issues = False
# Ping the server by looking for an HTML file
print "Pinging the server to initiate extraction..."
ping_url = base_url + "issues.html"
os.system('curl -O ' + ping_url)
os.system('rm -f issues.html')
for x in range(30):
# Update issues-email.txt
url = base_url + "issues-email.txt"
print 'Retrieving issues email from ' + url
os.system('rm -f issues-email.txt')
os.system('curl -O ' + url)
if self.parseIssuesEmail():
return True
print 'Failed to fetch issues email. '
time.sleep (30)
return False
# Parses the file $BOOST_ROOT/libs/maintainers.txt to create a hash
# mapping from the library name to the list of maintainers.
def parseLibraryMaintainersFile(self):
"""
Parse the maintainers file in ../../../libs/maintainers.txt to
collect information about the maintainers of broken libraries.
"""
lib_maintainer_regex = re.compile('(\S+)\s*(.*)')
name_email_regex = re.compile('\s*(\w*(\s*\w+)+)\s*<\s*(\S*(\s*\S+)+)\S*>')
at_regex = re.compile('\s*-\s*at\s*-\s*')
for line in file('../../../libs/maintainers.txt', 'r'):
if line.startswith('#'):
continue
m = lib_maintainer_regex.match (line)
if m:
libname = m.group(1)
if self.libraries.has_key(m.group(1)):
library = self.libraries[m.group(1)]
for person in re.split('\s*,\s*', m.group(2)):
nmm = name_email_regex.match(person)
if nmm:
name = nmm.group(1)
email = nmm.group(3)
email = at_regex.sub('@', email)
maintainer = self.getMaintainer(name, email)
maintainer.addLibrary(library)
library.addMaintainer(maintainer)
pass
pass
pass
pass
pass
pass
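# A libs/maintainers.txt entry that the regular expressions above accept
# looks roughly like this (names and addresses invented for illustration):
#
#   some_library   Jane Doe <jane -at- example.org>, John Roe <john -at- example.org>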
# Parses the file $BOOST_ROOT/libs/platform_maintainers.txt to
# create a hash mapping from the platform name to the list of
# maintainers.
def parsePlatformMaintainersFile(self):
"""
Parse the platform maintainers file in
../../../libs/platform_maintainers.txt to collect information
about the maintainers of the various platforms.
"""
platform_maintainer_regex = re.compile('([A-Za-z0-9_.-]*|"[^"]*")\s+(\S+)\s+(.*)')
name_email_regex = re.compile('\s*(\w*(\s*\w+)+)\s*<\s*(\S*(\s*\S+)+)\S*>')
at_regex = re.compile('\s*-\s*at\s*-\s*')
for line in file('../../../libs/platform_maintainers.txt', 'r'):
if line.startswith('#'):
continue
m = platform_maintainer_regex.match (line)
if m:
platformname = m.group(2)
if self.platforms.has_key(platformname):
platform = self.platforms[platformname]
for person in re.split('\s*,\s*', m.group(3)):
nmm = name_email_regex.match(person)
if nmm:
name = nmm.group(1)
email = nmm.group(3)
email = at_regex.sub('@', email)
maintainer = self.getPlatformMaintainer(name, email)
maintainer.addPlatform(m.group(1), platform)
platform.addMaintainer(maintainer)
pass
pass
pass
pass
pass
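# A libs/platform_maintainers.txt entry differs in carrying a runner column
# (optionally quoted) before the platform name, roughly (invented values):
#
#   "Some Runner"  some-platform  Jane Doe <jane -at- example.org>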
def numFailures(self):
count = 0
for library in self.libraries:
count += self.libraries[library].numFailures()
pass
return count
def numReportableFailures(self):
count = 0
for library in self.libraries:
count += self.libraries[library].numReportableFailures()
pass
return count
def composeSummaryEmail(self):
"""
Compose a message to send to the Boost developers'
list and return the message text.
"""
message = """From: Douglas Gregor <dgregor@osl.iu.edu>
To: boost@lists.boost.org
Reply-To: boost@lists.boost.org
Subject: [Report] """
message += str(self.numFailures()) + " failures on " + branch
if branch != 'trunk':
message += ' branch'
message += " (" + str(datetime.date.today()) + ")"
message += """
Boost regression test failures
"""
message += "Report time: " + self.date + """
This report lists all regression test failures on high-priority platforms.
Detailed report:
"""
message += ' ' + self.url + '\n\n'
if self.numFailures() == 0:
message += "No failures! Yay!\n"
return message
# List the platforms that are broken
any_broken_platforms = self.numReportableFailures() < self.numFailures()
if any_broken_platforms:
message += """The following platforms have a large number of failures:
"""
for platform in sorted_keys( self.platforms ):
if self.platforms[platform].isBroken():
message += (' ' + platform + ' ('
+ str(len(self.platforms[platform].failures))
+ ' failures)\n')
message += """
Failures on these "broken" platforms will be omitted from the results below.
Please see the full report for information about these failures.
"""
# Display the number of failures
message += (str(self.numReportableFailures()) + ' failures in ' +
str(len(self.libraries)) + ' libraries')
if any_broken_platforms:
message += (' (plus ' + str(self.numFailures() - self.numReportableFailures())
+ ' from broken platforms)')
message += '\n'
# Display the number of failures per library
for k in sorted_keys( self.libraries ):
library = self.libraries[k]
num_failures = library.numFailures()
message += ' ' + library.name + ' ('
if library.numReportableFailures() > 0:
message += (str(library.numReportableFailures())
+ " failures")
if library.numReportableFailures() < num_failures:
if library.numReportableFailures() > 0:
message += ', plus '
message += (str(num_failures-library.numReportableFailures())
+ ' failures on broken platforms')
message += ')\n'
pass
message += '\n'
# Provide the details for the failures in each library.
for k in sorted_keys( self.libraries ):
library = self.libraries[k]
if library.numReportableFailures() > 0:
message += '\n|' + library.name + '|\n'
for test in library.tests:
if test.numReportableFailures() > 0:
message += ' ' + test.name + ':'
for failure in test.failures:
platform = failure.platform
if not platform.isBroken():
message += ' ' + platform.name
message += '\n'
return message
def composeTestingSummaryEmail(self):
"""
Compose a message to send to the Boost Testing list. Returns
the message text if a message is needed, or None
otherwise.
"""
brokenPlatforms = 0
for platform in sorted_keys( self.platforms ):
if self.platforms[platform].isBroken():
brokenPlatforms = brokenPlatforms + 1
if brokenPlatforms == 0:
return None
message = """From: Douglas Gregor <dgregor@osl.iu.edu>
To: boost-testing@lists.boost.org
Reply-To: boost-testing@lists.boost.org
Subject: [Report] """
message += str(brokenPlatforms) + " potentially broken platforms on " + branch
if branch != 'trunk':
message += ' branch'
message += " (" + str(datetime.date.today()) + ")"
message += """
Potentially broken platforms for Boost regression testing
"""
message += "Report time: " + self.date + """
This report lists the high-priority platforms that are exhibiting a
large number of regression test failures, which might indicate a problem
with the test machines or testing harness.
Detailed report:
"""
message += ' ' + self.url + '\n'
message += """
Platforms with a large number of failures:
"""
for platform in sorted_keys( self.platforms ):
if self.platforms[platform].isBroken():
message += (' ' + platform + ' ('
+ str(len(self.platforms[platform].failures))
+ ' failures)\n')
return message
# Send a message to "person" (a maintainer of a library that is
# failing).
# maintainers is the result of get_library_maintainers()
def send_individualized_message (branch, person, maintainers):
# There are several states we could be in:
# 0 Initial state. Eat everything up to the "NNN failures in MMM
# libraries" line
# 1 Suppress output within this library
# 2 Forward output within this library
state = 0
message = ''  # accumulates the filtered e-mail text for this maintainer
failures_in_lib_regex = re.compile('\d+ failur.*\d+ librar')
lib_failures_regex = re.compile(' (\S+) \((\d+)\)')
lib_start_regex = re.compile('\|(\S+)\|')
general_pass_regex = re.compile(' http://')
for line in file('issues-email.txt', 'r'):
if state == 0:
lfm = lib_failures_regex.match(line)
if lfm:
# Pass the line through if the current person is a
# maintainer of this library
if lfm.group(1) in maintainers and person in maintainers[lfm.group(1)]:
message += line
print line,
elif failures_in_lib_regex.match(line):
message += "\nThere are failures in these libraries you maintain:\n"
elif general_pass_regex.match(line):
message += line
lib_start = lib_start_regex.match(line)
if lib_start:
if state == 0:
message += '\n'
if lib_start.group(1) in maintainers and person in maintainers[lib_start.group(1)]:
message += line
state = 2
else:
state = 1
else:
if state == 1:
pass
elif state == 2:
message += line
if '--debug' in sys.argv:
print '-----------------Message text----------------'
print message
else:
print
if '--send' in sys.argv:
print "Sending..."
smtp = smtplib.SMTP('milliways.osl.iu.edu')
smtp.sendmail(from_addr = 'Douglas Gregor <dgregor@osl.iu.edu>',
to_addrs = person[1],
msg = message)
print "Done."
# Send a message to the developer's list
def send_boost_developers_message(branch, maintainers, failing_libraries):
to_line = 'boost@lists.boost.org'
from_line = 'Douglas Gregor <dgregor@osl.iu.edu>'
message = """From: Douglas Gregor <dgregor@osl.iu.edu>
To: boost@lists.boost.org
Reply-To: boost@lists.boost.org
Subject: Boost regression testing notification ("""
message += str(datetime.date.today()) + " [" + branch + "]"
message += ")"
message += """
"""
for line in file('issues-email.txt', 'r'):
# Right before the detailed report, put out a warning message if
# any libraries with failures do not have maintainers listed.
if line.startswith('Detailed report:'):
missing_maintainers = False
for lib in failing_libraries:
if not(lib in maintainers) or maintainers[lib] == list():
missing_maintainers = True
if missing_maintainers:
message += """WARNING: The following libraries have failing regression tests but do
not have a maintainer on file. Once a maintainer is found, add an
entry to libs/maintainers.txt to eliminate this message.
"""
for lib in failing_libraries:
if not(lib in maintainers) or maintainers[lib] == list():
message += ' ' + lib + '\n'
message += '\n'
message += line
if '--send' in sys.argv:
print 'Sending notification email...'
smtp = smtplib.SMTP('milliways.osl.iu.edu')
smtp.sendmail(from_addr = from_line, to_addrs = to_line, msg = message)
print 'Done.'
if '--debug' in sys.argv:
print "----------Boost developer's message text----------"
print message
###############################################################################
# Main program #
###############################################################################
# Parse command-line options
branch = "trunk"
for arg in sys.argv:
if arg.startswith("--branch="):
branch = arg[len("--branch="):]
report = Report(branch)
# Try to parse the issues e-mail
if '--no-get' in sys.argv:
okay = report.parseIssuesEmail()
else:
okay = report.getIssuesEmail()
if not okay:
print 'Aborting.'
if '--send' in sys.argv:
message = """From: Douglas Gregor <dgregor@osl.iu.edu>
To: Douglas Gregor <dgregor@osl.iu.edu>
Reply-To: boost@lists.boost.org
Subject: Regression status script failed on """
message += str(datetime.date.today()) + " [" + branch + "]"
smtp = smtplib.SMTP('milliways.osl.iu.edu')
smtp.sendmail(from_addr = 'Douglas Gregor <dgregor@osl.iu.edu>',
to_addrs = 'dgregor@osl.iu.edu',
msg = message)
sys.exit(1)
# Try to parse maintainers information
report.parseLibraryMaintainersFile()
report.parsePlatformMaintainersFile()
# Generate individualized e-mail for library maintainers
for maintainer_name in report.maintainers:
maintainer = report.maintainers[maintainer_name]
email = maintainer.composeEmail(report)
if email:
if '--send' in sys.argv:
print ('Sending notification email to ' + maintainer.name + '...')
smtp = smtplib.SMTP('milliways.osl.iu.edu')
smtp.sendmail(from_addr = report_author,
to_addrs = maintainer.email,
msg = email)
print 'done.\n'
else:
print 'Would send a notification e-mail to',maintainer.name
if '--debug' in sys.argv:
print ('Message text for ' + maintainer.name + ':\n')
print email
# Generate individualized e-mail for platform maintainers
for maintainer_name in report.platform_maintainers:
maintainer = report.platform_maintainers[maintainer_name]
email = maintainer.composeEmail(report)
if email:
if '--send' in sys.argv:
print ('Sending notification email to ' + maintainer.name + '...')
smtp = smtplib.SMTP('milliways.osl.iu.edu')
smtp.sendmail(from_addr = report_author,
to_addrs = maintainer.email,
msg = email)
print 'done.\n'
else:
print 'Would send a notification e-mail to',maintainer.name
if '--debug' in sys.argv:
print ('Message text for ' + maintainer.name + ':\n')
print email
email = report.composeSummaryEmail()
if '--send' in sys.argv:
print 'Sending summary email to Boost developer list...'
smtp = smtplib.SMTP('milliways.osl.iu.edu')
smtp.sendmail(from_addr = report_author,
to_addrs = boost_dev_list,
msg = email)
print 'done.\n'
if '--debug' in sys.argv:
print 'Message text for summary:\n'
print email
email = report.composeTestingSummaryEmail()
if email:
if '--send' in sys.argv:
print 'Sending summary email to Boost testing list...'
smtp = smtplib.SMTP('milliways.osl.iu.edu')
smtp.sendmail(from_addr = report_author,
to_addrs = boost_testing_list,
msg = email)
print 'done.\n'
if '--debug' in sys.argv:
print 'Message text for testing summary:\n'
print email
if not ('--send' in sys.argv):
print 'Chickening out and not sending any e-mail.'
print 'Use --send to actually send e-mail, --debug to see e-mails.'
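# Command-line summary of the flags handled above: --branch=<name> selects
# the branch to report on (default 'trunk'), --no-get reuses an existing
# issues-email.txt instead of downloading it, --send actually sends the
# e-mails, and --debug prints the composed message texts.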


@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<root>
<expected-failures>
</expected-failures>
</root>


@@ -1,371 +0,0 @@
# Copyright (c) MetaCommunications, Inc. 2003-2004
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import shutil
import os.path
import os
import string
import time
import sys
import utils
import runner
report_types = [ 'us', 'ds', 'ud', 'dd', 'l', 'p', 'x', 'i', 'n', 'ddr', 'dsr' ]
if __name__ == '__main__':
run_dir = os.path.abspath( os.path.dirname( sys.argv[ 0 ] ) )
else:
run_dir = os.path.abspath( os.path.dirname( sys.modules[ __name__ ].__file__ ) )
def map_path( path ):
return os.path.join( run_dir, path )
def xsl_path( xsl_file_name, v2 = 0 ):
if v2:
return map_path( os.path.join( 'xsl/v2', xsl_file_name ) )
else:
return map_path( os.path.join( 'xsl', xsl_file_name ) )
def make_result_pages(
test_results_file
, expected_results_file
, failures_markup_file
, tag
, run_date
, comment_file
, results_dir
, result_prefix
, reports
, v2
):
utils.log( 'Producing the reports...' )
__log__ = 1
output_dir = os.path.join( results_dir, result_prefix )
utils.makedirs( output_dir )
if comment_file != '':
comment_file = os.path.abspath( comment_file )
if expected_results_file != '':
expected_results_file = os.path.abspath( expected_results_file )
else:
expected_results_file = os.path.abspath( map_path( 'empty_expected_results.xml' ) )
extended_test_results = os.path.join( output_dir, 'extended_test_results.xml' )
if 'x' in reports:
utils.log( ' Merging with expected results...' )
utils.libxslt(
utils.log
, test_results_file
, xsl_path( 'add_expected_results.xsl', v2 )
, extended_test_results
, { 'expected_results_file': expected_results_file
, 'failures_markup_file' : failures_markup_file
, 'source' : tag }
)
links = os.path.join( output_dir, 'links.html' )
utils.makedirs( os.path.join( output_dir, 'output' ) )
for mode in ( 'developer', 'user' ):
utils.makedirs( os.path.join( output_dir, mode , 'output' ) )
if 'l' in reports:
utils.log( ' Making test output files...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'links_page.xsl', v2 )
, links
, {
'source': tag
, 'run_date': run_date
, 'comment_file': comment_file
, 'explicit_markup_file': failures_markup_file
}
)
issues = os.path.join( output_dir, 'developer', 'issues.html' )
if 'i' in reports:
utils.log( ' Making issues list...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'issues_page.xsl', v2 )
, issues
, {
'source': tag
, 'run_date': run_date
, 'comment_file': comment_file
, 'explicit_markup_file': failures_markup_file
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 'd' in reports:
utils.log( ' Making detailed %s report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'result_page.xsl', v2 )
, os.path.join( output_dir, mode, 'index.html' )
, {
'links_file': 'links.html'
, 'mode': mode
, 'source': tag
, 'run_date': run_date
, 'comment_file': comment_file
, 'expected_results_file': expected_results_file
, 'explicit_markup_file' : failures_markup_file
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 's' in reports:
utils.log( ' Making summary %s report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'summary_page.xsl', v2 )
, os.path.join( output_dir, mode, 'summary.html' )
, {
'mode' : mode
, 'source': tag
, 'run_date': run_date
, 'comment_file': comment_file
, 'explicit_markup_file' : failures_markup_file
}
)
if v2 and "ddr" in reports:
utils.log( ' Making detailed %s release report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'result_page.xsl', v2 )
, os.path.join( output_dir, "developer", 'index_release.html' )
, {
'links_file': 'links.html'
, 'mode': "developer"
, 'source': tag
, 'run_date': run_date
, 'comment_file': comment_file
, 'expected_results_file': expected_results_file
, 'explicit_markup_file' : failures_markup_file
, 'release': "yes"
}
)
if v2 and "dsr" in reports:
utils.log( ' Making summary %s release report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'summary_page.xsl', v2 )
, os.path.join( output_dir, "developer", 'summary_release.html' )
, {
'mode' : "developer"
, 'source': tag
, 'run_date': run_date
, 'comment_file': comment_file
, 'explicit_markup_file' : failures_markup_file
, 'release': 'yes'
}
)
if 'e' in reports:
utils.log( ' Generating expected_results ...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'produce_expected_results.xsl', v2 )
, os.path.join( output_dir, 'expected_results.xml' )
)
if v2 and 'n' in reports:
utils.log( ' Making runner comment files...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'runners.xsl', v2 )
, os.path.join( output_dir, 'runners.html' )
)
shutil.copyfile(
xsl_path( 'html/master.css', v2 )
, os.path.join( output_dir, 'master.css' )
)
def build_xsl_reports(
locate_root_dir
, tag
, expected_results_file
, failures_markup_file
, comment_file
, results_dir
, result_file_prefix
, dont_collect_logs = 0
, reports = report_types
, v2 = 0
, user = None
, upload = False
):
( run_date ) = time.strftime( '%Y-%m-%dT%H:%M:%SZ', time.gmtime() )
test_results_file = os.path.join( results_dir, 'test_results.xml' )
bin_boost_dir = os.path.join( locate_root_dir, 'bin', 'boost' )
if v2:
import merger
merger.merge_logs(
tag
, user
, results_dir
, test_results_file
, dont_collect_logs
)
else:
utils.log( ' dont_collect_logs: %s' % dont_collect_logs )
if not dont_collect_logs:
f = open( test_results_file, 'w+' )
f.write( '<tests>\n' )
runner.collect_test_logs( [ bin_boost_dir ], f )
f.write( '</tests>\n' )
f.close()
make_result_pages(
test_results_file
, expected_results_file
, failures_markup_file
, tag
, run_date
, comment_file
, results_dir
, result_file_prefix
, reports
, v2
)
if v2 and upload:
upload_dir = 'regression-logs/'
utils.log( 'Uploading v2 results into "%s" [connecting as %s]...' % ( upload_dir, user ) )
archive_name = '%s.tar.gz' % result_file_prefix
utils.tar(
os.path.join( results_dir, result_file_prefix )
, archive_name
)
utils.sourceforge.upload( os.path.join( results_dir, archive_name ), upload_dir, user )
utils.sourceforge.untar( os.path.join( upload_dir, archive_name ), user, background = True )
def accept_args( args ):
args_spec = [
'locate-root='
, 'tag='
, 'expected-results='
, 'failures-markup='
, 'comment='
, 'results-dir='
, 'results-prefix='
, 'dont-collect-logs'
, 'reports='
, 'v2'
, 'user='
, 'upload'
, 'help'
]
options = {
'--comment': ''
, '--expected-results': ''
, '--failures-markup': ''
, '--reports': string.join( report_types, ',' )
, '--tag': None
, '--user': None
, 'upload': False
}
utils.accept_args( args_spec, args, options, usage )
if not options.has_key( '--results-dir' ):
options[ '--results-dir' ] = options[ '--locate-root' ]
if not options.has_key( '--results-prefix' ):
if options.has_key( '--v2' ):
options[ '--results-prefix' ] = 'all'
else:
options[ '--results-prefix' ] = ''
return (
options[ '--locate-root' ]
, options[ '--tag' ]
, options[ '--expected-results' ]
, options[ '--failures-markup' ]
, options[ '--comment' ]
, options[ '--results-dir' ]
, options[ '--results-prefix' ]
, options.has_key( '--dont-collect-logs' )
, options[ '--reports' ].split( ',' )
, options.has_key( '--v2' )
, options[ '--user' ]
, options.has_key( '--upload' )
)
def usage():
print 'Usage: %s [options]' % os.path.basename( sys.argv[0] )
print '''
\t--locate-root the same as --locate-root in compiler_status
\t--tag the tag for the results (e.g. 'CVS-HEAD')
\t--expected-results the file with the results to be compared with
\t the current run
\t--failures-markup the file with the failures markup
\t--comment an html comment file (will be inserted in the reports)
\t--results-dir the directory containing -links.html, -fail.html
\t files produced by compiler_status (by default the
\t same as specified in --locate-root)
\t--results-prefix the prefix of -links.html, -fail.html
\t files produced by compiler_status
\t--v2 v2 reports (combine multiple runners results into a
\t single set of reports)
The following options are valid only for v2 reports:
\t--user SourceForge user name for a shell account
\t--upload upload v2 reports to SourceForge
The following options are useful in debugging:
\t--dont-collect-logs don't collect the test logs
\t--reports produce only the specified reports
\t us - user summary
\t ds - developer summary
\t ud - user detailed
\t dd - developer detailed
\t l - links
\t p - patches
\t x - extended results file
\t i - issues
'''
def main():
build_xsl_reports( *accept_args( sys.argv[ 1 : ] ) )
if __name__ == '__main__':
main()
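# Example invocation; the script name and paths are placeholders, while the
# options themselves are the ones handled by accept_args() above:
#
#   python <this_script> --locate-root=/path/to/locate-root --tag=CVS-HEAD \
#       --expected-results=expected_results.xml \
#       --failures-markup=explicit-failures-markup.xml --v2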


@@ -1,165 +0,0 @@
import xml.sax.saxutils
import time
def make_test_name( library_idx, test_idx ):
return "test_%02d_%02d" % ( library_idx, test_idx )
def make_library_name( library_idx ):
if library_idx % 4 in ( 0, 1 ):
return "library_%02d/%02d" % ( int( library_idx / 4 ) * 4, library_idx % 4 )
else:
return "library_%02d" % library_idx
def make_toolset_name( toolset_idx ):
return "toolset_%02d" % toolset_idx
def make_library_target_directory( library_idx, toolset_idx, variant = None ):
base = "lib/%s/%s" % ( make_library_name( library_idx )
, make_toolset_name( toolset_idx ) )
if variant is not None:
return "%s/%s" % ( base, variant )
else:
return base
def make_test_target_directory( library_idx, toolset_idx, test_name, variant ):
base = "%s/%s/%s" % ( make_library_name( library_idx )
, make_toolset_name( toolset_idx )
, test_name )
if variant is not None:
return "%s/%s" % ( base, variant )
else:
return base
def format_timestamp( timestamp ):
return time.strftime( "%Y-%m-%dT%H:%M:%SZ", timestamp )
def make_test_log( xml_generator
, library_idx
, toolset_idx
, test_name
, test_type
, test_result
, show_run_output
, variant ):
library = make_library_name( library_idx )
toolset_name = make_toolset_name( toolset_idx )
target_directory = ""
if test_type != "lib":
target_directory = make_test_target_directory( library_idx, toolset_idx, test_name, variant )
else:
target_directory = make_library_target_directory( library_idx, toolset_idx, variant )
xml_generator.startElement( "test-log", { "library": library
, "test-name": test_name
, "toolset": toolset_name
, "test-type": test_type
, "test-program": "some_program"
, "target-directory": target_directory
, "show-run-output": show_run_output
} )
if test_type != "lib":
if test_result == "success" and ( toolset_idx + 1 ) % 4:
xml_generator.startElement( "compile", { "result": "success" } );
xml_generator.characters( "Compiling in %s" % target_directory )
xml_generator.endElement( "compile" )
if test_type.find( "link" ) == 0 or test_type.find( "run" ) == 0 and toolset_idx % 4:
xml_generator.startElement( "lib", { "result": test_result } );
xml_generator.characters( make_library_target_directory( library_idx, toolset_idx ) )
xml_generator.endElement( "lib" )
xml_generator.startElement( "link", { "result": "success" } );
xml_generator.characters( "Linking in %s" % target_directory )
xml_generator.endElement( "link" )
if test_type.find( "run" ) == 0 and ( toolset_idx + 2 ) % 4:
xml_generator.startElement( "run", { "result": test_result } );
xml_generator.characters( "Running in %s" % target_directory )
xml_generator.endElement( "run" )
else:
xml_generator.startElement( "compile", { "result": test_result } );
xml_generator.characters( "Compiling in %s" % make_library_target_directory( library_idx, toolset_idx ) )
xml_generator.endElement( "compile" )
xml_generator.endElement( "test-log" )
def make_expicit_failure_markup( num_of_libs, num_of_toolsets, num_of_tests ):
g = xml.sax.saxutils.XMLGenerator( open( "explicit-failures-markup.xml", "w" ), "utf-8" )
g.startDocument()
g.startElement( "explicit-failures-markup", {} );
# required toolsets
for i_toolset in range( 0, num_of_toolsets ):
if i_toolset < 2:
g.startElement( "mark-toolset", { "name": "toolset_%02d" % i_toolset, "status":"required"} )
g.endElement( "mark-toolset" )
for i_library in range( 0, num_of_libs ):
g.startElement( "library", { "name": make_library_name( i_library ) } )
if i_library % 4 == 0:
g.startElement( "mark-unusable", {} )
for i_toolset in range( 0, num_of_toolsets ):
if i_toolset % 2 == 1:
g.startElement( "toolset", { "name": make_toolset_name( i_toolset ) } )
g.endElement( "toolset" )
g.startElement( "note", { "author": u"T. T\xe8st" } )
g.characters( "Test note" )
g.endElement( "note" )
g.endElement( "mark-unusable" )
for i_test in range( 0, num_of_tests ):
category = 0
explicitly_marked_failure = 0
unresearched = 0
if i_test % 2 == 0:
category = i_test % 3
if i_test % 3 == 0:
explicitly_marked_failure = 1
if i_test % 2 == 0:
unresearched = 1
if category or explicitly_marked_failure:
test_attrs = { "name": make_test_name( i_library, i_test ) }
if category:
test_attrs[ "category" ] = "Category %s" % category
g.startElement( "test", test_attrs )
if explicitly_marked_failure:
failure_attrs = {}
if unresearched: failure_attrs[ "reason" ] = "not-researched"
g.startElement( "mark-failure", failure_attrs )
g.startElement( "toolset", { "name": make_toolset_name( 1 ) } )
g.endElement( "toolset" )
g.startElement( "toolset", { "name": make_toolset_name( 0 ) } )
g.endElement( "toolset" )
g.startElement( "toolset", { "name": make_toolset_name( 2 ) } )
g.endElement( "toolset" )
g.startElement( "note", { "author": u"V. Ann\xf3tated" } )
g.characters( "Some thoughtful note" )
g.endElement( "note" )
g.endElement( "mark-failure" )
g.endElement( "test" );
g.endElement( "library" )
g.endElement( "explicit-failures-markup" )
g.endDocument()
def make_expected_results( num_of_libs, num_of_toolsets, num_of_tests ):
pass

Some files were not shown because too many files have changed in this diff.