Copy the report generation scripts to new subdir for eventual use.

Rene Rivera
2015-01-21 12:56:41 -06:00
parent 479a0a94c0
commit 25bfcff92f
42 changed files with 6037 additions and 16 deletions


@@ -21,7 +21,7 @@
href="http://boost.org/development/running_regression_tests.html">Instructions</a>
for running the regression tests as part of the published regression tests
are available at the Boost web site.</li>
<li><a href="doc/reports.html">Running Regression Test Reports</a>.</li>
<li><a href="reports/doc/reports.html">Running Regression Test Reports</a>.</li>
<li><a href="src/process_jam_log.cpp">process_jam_log.cpp</a> -
Processes the bjam outputs, creating a file named test_log.xml for each

reports/build/.gitignore (vendored, Normal file, 1 line)

@@ -0,0 +1 @@
/bin/

reports/build/Jamfile.jam (Normal file, 24 lines)

@@ -0,0 +1,24 @@
# Copyright Rene Rivera 2015
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
exe boost_report
:
[ glob ../src/*.cpp ]
/boost/filesystem//boost_filesystem/<link>static
/boost//filesystem/<link>static
/boost//date_time/<link>static
/boost//regex/<link>static
/boost//program_options/<link>static
/boost//iostreams/<link>static
:
<define>BOOST_ALL_NO_LIB=1
<implicit-dependency>/boost//headers
;
explicit boost_report ;
alias install : bin ;
install bin : boost_report/<variant>release ;
explicit install bin ;


@@ -11,10 +11,7 @@
</head>
<body>
<h1>
<img src="../../../boost.png" alt="boost.png (6897 bytes)" width="277"
height="86" /> Running Regression Test Reports
</h1>
<h1>Running Regression Test Reports</h1>
<h2>Introduction</h2>
@@ -65,7 +62,7 @@ mkdir boost-reports # or some other name of your choice
<blockquote>
<pre>
cd ~/boost/boost-reports # i.e. the directory created above
curl -o build_results_all.sh https://raw.githubusercontent.com/boostorg/regression/develop/xsl_reports/build_results_all.sh
curl -o build_results_all.sh https://raw.githubusercontent.com/boostorg/regression/develop/reports/src/build_results_all.sh
chmod +x build_results_all.sh
./build_results_all.sh
</pre>
@@ -80,8 +77,6 @@ chmod +x build_results_all.sh
<h2>To Do</h2>
<ul>
<li>Avoid the clone --recursive; just install the core libraries needed
to build the report generator.</li>
</ul>
<h2>Acknowledgements</h2>
@@ -92,14 +87,7 @@ chmod +x build_results_all.sh
</p>
<hr />
<p>
Revised
<!--webbot bot="Timestamp" S-Type="EDITED" S-Format="%d %B %Y" startspan -->
10 December 2014
<!--webbot bot="Timestamp" endspan i-checksum="32403" -->
</p>
<p>Copyright Beman Dawes 2014. Copyright Rene Rivera 2014.</p>
<p>Copyright Beman Dawes 2014. Copyright Rene Rivera 2014-2015.</p>
<p>
Distributed under the <a href="http://www.boost.org/LICENSE_1_0.txt">Boost


@@ -0,0 +1,216 @@
// Copyright MetaCommunications, Inc. 2003-2007.
// Copyright Steven Watanabe 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "add_expected_results.hpp"
#include "common.hpp"
#include "xml.hpp"
#include <string>
#include <boost/foreach.hpp>
#include <boost/functional/hash.hpp>
using namespace boost::regression;
bool target_result(const test_structure_t::test_log_t& test_log, const std::string& name) {
boost::unordered_map<std::string, test_structure_t::target_t>::const_iterator pos = test_log.targets.find(name);
if(pos != test_log.targets.end()) {
return pos->second.result;
} else {
return false;
}
}
bool is_test_log_complete(const test_structure_t::test_log_t& test_log) {
// FIXME: The original XSL function is buggy and
// Boost.Build relies on its behavior
return true;
if(test_log.test_type == "compile" || test_log.test_type == "compile_fail" ||
!target_result(test_log, "compile")) {
return test_log.targets.count("compile") == 1 &&
test_log.targets.count("link") == 0 &&
test_log.targets.count("run") == 0;
} else if(test_log.test_type == "link" || test_log.test_type == "link_fail" ||
test_log.test_type == "" || test_log.test_type == "lib" ||
!target_result(test_log, "link")) {
return test_log.targets.count("compile") == 1 &&
test_log.targets.count("link") == 1 &&
test_log.targets.count("run") == 0;
} else if(test_log.test_type == "run" || test_log.test_type == "run_fail" ||
test_log.test_type == "run_pyd" || test_log.test_type == "run_mpi") {
return test_log.targets.count("compile") == 1 &&
test_log.targets.count("link") == 1 &&
test_log.targets.count("run") == 1;
} else {
throw std::runtime_error("Unknown test type " + test_log.test_type);
}
}
std::string get_toolset_name(const std::string& toolset, const expected_results_t& expected_results) {
expected_results_t::toolset_aliases_t::const_iterator pos = expected_results.toolset_aliases.find(toolset);
if(pos != expected_results.toolset_aliases.end()) {
return pos->second;
} else {
return toolset;
}
}
void add_note(test_structure_t::test_log_t& test_log, const std::string& text, const std::string& class_name = "auto-note") {
test_log.notes.push_back("<span class=\"" + class_name + "\">" + text + "</span>");
}
void process_test_log(test_structure_t::test_log_t& test_log,
const failures_markup_t& failures_markup,
const expected_results_t& expected_results,
const std::string& source) {
bool is_complete = is_test_log_complete(test_log);
bool has_failures = false;
typedef boost::unordered_map<std::string, test_structure_t::target_t>::const_reference target_ref;
BOOST_FOREACH(target_ref target, test_log.targets) {
if(!target.second.result) {
has_failures = true;
break;
}
}
bool actual_result = !(has_failures || !is_complete);
std::string toolset_name = get_toolset_name(test_log.toolset, expected_results);
const bool* expected_results_test_case = 0;
{
test_case_t test_id;
test_id.library = test_log.library;
test_id.test_name = test_log.test_name;
test_id.toolset_name = toolset_name;
expected_results_t::tests_t::const_iterator pos = expected_results.tests.find(test_id);
if(pos != expected_results.tests.end()) {
expected_results_test_case = &pos->second;
}
}
std::string category = "0";
node_ptr test_failures_markup = 0;
{
boost::unordered_map<std::string, node_ptr>::const_iterator pos = failures_markup.libraries.find(test_log.library);
if(pos != failures_markup.libraries.end()) {
node_ptr library_markup = pos->second;
FOR_EACH_ELEMENT(elem, library_markup) {
if(check_name(elem, "test")) {
std::string test_name;
if(lookup_attr(elem, "name", test_name) && re_match(test_name, test_log.test_name)) {
lookup_attr(elem, "category", category);
FOR_EACH_ELEMENT(mark_failure, elem) {
FOR_EACH_ELEMENT(toolset, mark_failure) {
std::string toolset_name;
if(lookup_attr(toolset, "name", toolset_name) && re_match(toolset_name, test_log.toolset)) {
test_failures_markup = mark_failure;
goto found_explicit_failure_markup;
}
}
}
}
} else if(check_name(elem, "mark-expected-failures")) {
bool has_test = false;
bool has_toolset = false;
FOR_EACH_ELEMENT(subelem, elem) {
std::string name;
bool has_name = lookup_attr(subelem, "name", name);
if(has_name && check_name(subelem, "test") && re_match(name, test_log.test_name)) {
has_test = true;
} else if(has_name && check_name(subelem, "toolset") && re_match(name, test_log.toolset)) {
has_toolset = true;
}
if(has_toolset && has_test) {
test_failures_markup = elem;
goto found_explicit_failure_markup;
}
}
}
}
}
found_explicit_failure_markup:;
}
bool is_new = (expected_results_test_case == 0);
bool has_explicit_markup = (test_failures_markup != 0);
bool expected_result = !(has_explicit_markup || (expected_results_test_case && !*expected_results_test_case));
bool status = (expected_result == actual_result);
bool unexpected_success = (expected_result == false && actual_result == true);
std::string expected_reason;
lookup_attr(test_failures_markup, "reason", expected_reason);
if(unexpected_success && has_explicit_markup) {
add_note(test_log,
"This test case was explicitly marked up in \n"
"<a href=\"https://github.com/boostorg/boost/blob/" + source + "/status/explicit-failures-markup.xml\">\n"
" status/explicit-failures-markup.xml</a> file in the Boost repository as \"expected to fail\",\n"
"but is passing. Please consult the notes/output below for more details.\n");
}
if(has_explicit_markup && lookup_element(test_failures_markup, "note") == 0) {
if(unexpected_success) {
add_note(test_log,
"No explanation was provided for this markup. Please contact the library \n"
"author(s)/maintainer(s) for more details.\n");
} else {
add_note(test_log,
"This failure was explicitly marked as expected in \n"
"<a href=\"https://github.com/boostorg/boost/blob/" + source + "/status/explicit-failures-markup.xml\">\n"
"status/explicit-failures-markup.xml</a> file in the Boost repository. \n"
"Please contact the library author(s)/maintainer(s) for the explanation of this markup.\n");
}
}
if(node_ptr elem = lookup_element(test_failures_markup, "note")) {
test_log.notes.push_back(elem);
}
if(expected_results_test_case && !*expected_results_test_case) {
if(unexpected_success) {
add_note(test_log,
"This test case used to fail in the reference (\"last-known-good\") release.\n");
} else {
add_note(test_log,
"This failure was present in the reference (\"last-known-good\") release.\n");
}
}
if(!is_complete && !has_failures) {
add_note(test_log,
"<b>[Reporting Tools Internal Error]</b> This test case's XML is missing one or more log entries\n"
"of the regression run's steps associated with the test case's type (\"" + test_log.test_type + "\").\n"
"Please <a href=\"mailto:mailto:boost-testing@lists.boost.org\">contact reporting tools \n"
"maintainers</a> about this problem.\n", "internal-error-note");
}
test_log.result = actual_result;
test_log.expected_result = expected_result;
test_log.expected_reason = expected_reason;
test_log.status = status;
test_log.is_new = is_new;
test_log.category = category;
}
// requires: source is a Git branch name
void boost::regression::add_expected_results(
test_structure_t::run_t& tests,
const failures_markup_t& failures_markup,
const expected_results_t& expected_results,
const std::string& source)
{
BOOST_FOREACH(test_structure_t::toolset_group_t::reference toolset, tests.toolsets) {
BOOST_FOREACH(test_structure_t::toolset_t::reference library, toolset.second) {
BOOST_FOREACH(test_structure_t::library_t::reference test_case, library.second) {
BOOST_FOREACH(test_structure_t::test_case_t::reference test_log, test_case.second) {
process_test_log(test_log, failures_markup, expected_results, source);
}
}
}
}
}


@@ -0,0 +1,26 @@
// add_expected_results.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef ADD_EXPECTED_RESULTS_HPP_INCLUDED
#define ADD_EXPECTED_RESULTS_HPP_INCLUDED
#include "xml.hpp"
namespace boost {
namespace regression {
void add_expected_results(
test_structure_t::run_t& tests,
const failures_markup_t& failures_markup,
const expected_results_t& expected_results,
const std::string& source);
}
}
#endif


@@ -0,0 +1,181 @@
// boost_report.cpp
//
// Copyright (c) 2013
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "issues_page.hpp"
#include "links_page.hpp"
#include "result_page.hpp"
#include "issues_page.hpp"
#include "summary_page.hpp"
#include "add_expected_results.hpp"
#include "produce_expected_results.hpp"
#include "runners.hpp"
#include "xml.hpp"
#include <boost/shared_ptr.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <boost/date_time/posix_time/posix_time_types.hpp>
#include <boost/date_time/posix_time/posix_time_io.hpp>
#include <boost/foreach.hpp>
#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <boost/program_options.hpp>
#include <boost/range/algorithm/sort.hpp>
#include <iostream>
#include <fstream>
using namespace boost::regression;
boost::shared_ptr<boost::zip::zip_archive> global_zip;
int main(int argc, char* argv[]) {
boost::program_options::options_description desc;
boost::program_options::variables_map vm;
desc.add_options()
("input-file", boost::program_options::value<std::vector<std::string> >(), "Runner XML files")
("expected,e", boost::program_options::value<std::string>()->required(), "Expected results file")
("markup,m", boost::program_options::value<std::string>()->required(), "Failures markup file")
("tag", boost::program_options::value<std::string>()->required(), "the tag for the results (i.e. 'trunk')")
("run-date", boost::program_options::value<boost::posix_time::ptime>()->default_value(boost::posix_time::second_clock::universal_time()), "the timestamp of the report")
("reports,r", boost::program_options::value<std::vector<std::string> >(), "The reports to generate")
("css", boost::program_options::value<std::string>(), "The CSS file")
("comment", boost::program_options::value<std::string>()->required(), "The report comment file")
("help,h", "produce a help message")
;
boost::program_options::positional_options_description p;
p.add("input-file", -1);
try {
boost::program_options::store(boost::program_options::command_line_parser(argc, argv)
.options(desc).positional(p).run(), vm);
boost::program_options::notify(vm);
boost::posix_time::ptime now = vm["run-date"].as<boost::posix_time::ptime>();
std::string tag = vm["tag"].as<std::string>();
std::set<std::string> reports;
if(vm.count("reports")) {
BOOST_FOREACH(const std::string& report, vm["reports"].as<std::vector<std::string> >())
reports.insert(report);
}
std::vector<std::string> warnings;
test_structure_t structure;
failures_markup_t markup;
expected_results_t expected;
std::vector<test_structure_t::run_t*> runs;
std::cout << "Reading expected results" << std::endl;
boost::shared_ptr<document_type> expected_results = read_xml_file(vm["expected"].as<std::string>().c_str());
load_expected_results(&*expected_results, expected);
std::cout << "Reading failures markup" << std::endl;
boost::shared_ptr<document_type> failures_markup = read_xml_file(vm["markup"].as<std::string>().c_str());
load_failures_markup(&*failures_markup, markup);
std::ofstream zip_file("report.zip", std::ios_base::binary);
zip_file.exceptions(std::ios_base::failbit);
global_zip.reset(new boost::zip::zip_archive(zip_file));
if(vm.count("input-file")) {
std::vector<std::string> input_files = vm["input-file"].as<std::vector<std::string> >();
boost::sort(input_files);
BOOST_FOREACH(const std::string& file, input_files) {
boost::shared_ptr<document_type> test_results;
try {
std::cout << "Reading " << file << std::endl;
test_results = read_xml_file(file.c_str());
load_test_structure(&*test_results, structure, runs);
test_structure_t::run_t* test_run = runs.back();
std::cout << "Merging expected results" << std::endl;
add_expected_results(*test_run, markup, expected, tag);
std::cout << "Generating links pages" << std::endl;
// must be run before test_results is discarded
if(reports.count("l"))
links_page(markup, *test_run);
} catch(std::ios_base::failure& e) {
std::cerr << e.what() << std::endl;
} catch(boost::property_tree::detail::rapidxml::parse_error& e) {
std::cerr << e.what() << std::endl;
}
}
}
std::vector<std::string> modes;
modes.push_back("developer");
modes.push_back("user");
if (reports.count("i") != 0) {
std::cout << "Generating issues page" << std::endl;
issues_list("developer", structure, markup,
true, tag, now, warnings, "");
}
BOOST_FOREACH(const std::string& mode, modes) {
if(reports.count(mode.substr(0, 1) + "d"))
result_page(structure, markup,
false, tag, now, warnings, mode, vm["comment"].as<std::string>());
}
BOOST_FOREACH(const std::string& mode, modes) {
if(reports.count(mode.substr(0, 1) + "s"))
summary_page(mode, tag, now, std::vector<std::string>(),
structure, markup, false);
}
BOOST_FOREACH(const std::string& mode, modes) {
if(reports.count(mode.substr(0, 1) + "dr"))
result_page(structure, markup,
true, tag, now, warnings, mode, vm["comment"].as<std::string>());
}
BOOST_FOREACH(const std::string& mode, modes) {
if(reports.count(mode.substr(0, 1) + "sr"))
summary_page(mode, tag, now, std::vector<std::string>(),
structure, markup, true);
}
if (reports.count("e")) {
produce_expected_results(structure);
}
if(reports.count("n")) {
runners(structure);
}
if(vm.count("css")) {
std::cout << "Writing file master.css" << std::endl;
html_writer css("master.css");
std::string filename = vm["css"].as<std::string>();
std::ifstream input(filename.c_str());
if (input) {
std::string data(std::istreambuf_iterator<char>(input.rdbuf()), std::istreambuf_iterator<char>());
css << data;
} else {
std::cerr << "warning: Could not open file: " << filename << std::endl;
}
}
global_zip.reset();
} catch(boost::program_options::error& e) {
if(vm.count("help")) {
std::cerr << desc << std::endl;
} else {
std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
} catch(boost::exception& e) {
std::cerr << boost::diagnostic_information(e) << std::endl;
return EXIT_FAILURE;
} catch(std::exception& e) {
std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
}


@@ -0,0 +1,905 @@
# Copyright (c) MetaCommunications, Inc. 2003-2007
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import shutil
import codecs
import xml.sax.handler
import xml.sax.saxutils
import glob
import re
import os.path
import os
import string
import time
import sys
import ftplib
import utils
#===============================================================================
# The entry point is the boost_wide_report.py script. In the simplest
# case, it should be run as:
#
# python boost_wide_report.py
# --locate-root=XXX
# --results-dir=YYY
# --tag trunk
# --expected-results=XXX
# --failures-markup=XXX
#
# Here 'trunk' is the tag of the things being tested; it should match the
# directory name on the server that keeps the uploaded individual results.
# 'results-dir' is a directory where individual results (zip files) will
# be downloaded and then processed. expected-results and failures-markup
# should be paths to the corresponding files in the 'status' subdir of the
# Boost tree. locate-root should point at the Boost root; it is unclear
# whether it is of any use now.
#
# This will download and process *all* test results, but it will not
# upload them, so it is good for local testing. It is possible to run
# this command, interrupt it while it processes results, leave just
# a few .zip files in the results dir, and then re-run with the
# --dont-collect-logs option to use only the already-downloaded zips.
#===============================================================================
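# A purely illustrative invocation (the paths below are hypothetical, not part
# of this repository). Passing --boost-report makes execute_tasks delegate the
# merge and report-generation steps to the compiled boost_report tool instead
# of the legacy XSLT pipeline:
#
#   python boost_wide_report.py
#       --locate-root=$HOME/boost-reports/develop
#       --results-dir=$HOME/boost-reports/develop
#       --tag=develop
#       --expected-results=$HOME/boost_root/status/expected_results.xml
#       --failures-markup=$HOME/boost_root/status/explicit-failures-markup.xml
#       --comment=comment.html
#       --boost-report=$HOME/boost_regression/reports/build/bin/boost_report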
report_types = [ 'us', 'ds', 'ud', 'dd', 'l', 'p', 'i', 'n', 'ddr', 'dsr', 'udr', 'usr' ]
if __name__ == '__main__':
run_dir = os.path.abspath( os.path.dirname( sys.argv[ 0 ] ) )
else:
run_dir = os.path.abspath( os.path.dirname( sys.modules[ __name__ ].__file__ ) )
def map_path( path ):
return os.path.join( run_dir, path )
def xsl_path( xsl_file_name ):
return map_path( os.path.join( 'xsl/v2', xsl_file_name ) )
class file_info:
def __init__( self, file_name, file_size, file_date ):
self.name = file_name
self.size = file_size
self.date = file_date
def __repr__( self ):
return "name: %s, size: %s, date %s" % ( self.name, self.size, self.date )
#
# Find the mod time from unix format directory listing line
#
def get_date( f, words ):
# f is an ftp object
(response, modtime) = f.sendcmd('MDTM %s' % words[-1]).split( None, 2 )
year = int( modtime[0:4] )
month = int( modtime[4:6] )
day = int( modtime[6:8] )
hours = int( modtime[8:10] )
minutes = int( modtime[10:12] )
seconds = int( modtime[12:14] )
return ( year, month, day, hours, minutes, seconds, 0, 0, 0)
def list_ftp( f ):
# f is an ftp object
utils.log( "listing source content" )
lines = []
# 1. get all lines
f.dir( lambda x: lines.append( x ) )
# 2. split lines into words
word_lines = [ x.split( None, 8 ) for x in lines ]
# we don't need directories
result = [ file_info( l[-1], int( l[4] ), get_date( f, l ) ) for l in word_lines if l[0][0] != "d" ]
for f in result:
utils.log( " %s" % f )
return result
def list_dir( dir ):
utils.log( "listing destination content %s" % dir )
result = []
for file_path in glob.glob( os.path.join( dir, "*.zip" ) ):
if os.path.isfile( file_path ):
mod_time = time.gmtime( os.path.getmtime( file_path ) )
mod_time = ( mod_time[0], mod_time[1], mod_time[2], mod_time[3], mod_time[4], mod_time[5], 0, 0, mod_time[8] )
size = os.path.getsize( file_path )
result.append( file_info( os.path.basename( file_path ), size, mod_time ) )
for fi in result:
utils.log( " %s" % fi )
return result
def find_by_name( d, name ):
for dd in d:
if dd.name == name:
return dd
return None
# Proof:
# gmtime(result) = time_tuple
# mktime(gmtime(result)) = mktime(time_tuple)
# correction = mktime(gmtime(result)) - result
# result = mktime(time_tuple) - correction
def mkgmtime(time_tuple):
# treat the tuple as if it were local time
local = time.mktime(time_tuple)
# calculate the correction to get gmtime
old_correction = 0
correction = time.mktime(time.gmtime(local)) - local
result = local
# iterate until the correction doesn't change
while correction != old_correction:
old_correction = correction
correction = time.mktime(time.gmtime(result)) - result
result = local - correction
return result
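# Illustrative sanity check (not part of the original source): mkgmtime should
# invert time.gmtime for an epoch timestamp, e.g.
#   t = 1421844000
#   assert int( mkgmtime( time.gmtime( t ) ) ) == t   # modulo DST edge cases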
def diff( source_dir_content, destination_dir_content ):
utils.log( "Finding updated files" )
result = ( [], [] ) # ( changed_files, obsolete_files )
for source_file in source_dir_content:
found = find_by_name( destination_dir_content, source_file.name )
if found is None: result[0].append( source_file.name )
elif time.mktime( found.date ) != time.mktime( source_file.date ) or \
found.size != source_file.size:
result[0].append( source_file.name )
else:
pass
for destination_file in destination_dir_content:
found = find_by_name( source_dir_content, destination_file.name )
if found is None: result[1].append( destination_file.name )
utils.log( " Updated files:" )
for f in result[0]:
utils.log( " %s" % f )
utils.log( " Obsolete files:" )
for f in result[1]:
utils.log( " %s" % f )
return result
def _modtime_timestamp( file ):
return os.stat( file ).st_mtime
root_paths = []
def shorten( file_path ):
root_paths.sort( lambda x, y: cmp( len(y ), len( x ) ) )
for root in root_paths:
if file_path.lower().startswith( root.lower() ):
return file_path[ len( root ): ].replace( "\\", "/" )
return file_path.replace( "\\", "/" )
class action:
def __init__( self, file_path ):
self.file_path_ = file_path
self.relevant_paths_ = [ self.file_path_ ]
self.boost_paths_ = []
self.dependencies_ = []
self.other_results_ = []
def run( self ):
utils.log( "%s: run" % shorten( self.file_path_ ) )
__log__ = 2
for dependency in self.dependencies_:
if not os.path.exists( dependency ):
utils.log( "%s doesn't exists, removing target" % shorten( dependency ) )
self.clean()
return
if not os.path.exists( self.file_path_ ):
utils.log( "target doesn't exists, building" )
self.update()
return
dst_timestamp = _modtime_timestamp( self.file_path_ )
utils.log( " target: %s [%s]" % ( shorten( self.file_path_ ), dst_timestamp ) )
needs_updating = 0
utils.log( " dependencies:" )
for dependency in self.dependencies_:
dm = _modtime_timestamp( dependency )
update_mark = ""
if dm > dst_timestamp:
needs_updating = 1
utils.log( ' %s [%s] %s' % ( shorten( dependency ), dm, update_mark ) )
if needs_updating:
utils.log( "target needs updating, rebuilding" )
self.update()
return
else:
utils.log( "target is up-to-date" )
def clean( self ):
to_unlink = self.other_results_ + [ self.file_path_ ]
for result in to_unlink:
utils.log( ' Deleting obsolete "%s"' % shorten( result ) )
if os.path.exists( result ):
os.unlink( result )
class merge_xml_action( action ):
def __init__( self, source, destination, expected_results_file, failures_markup_file, tag ):
action.__init__( self, destination )
self.source_ = source
self.destination_ = destination
self.tag_ = tag
self.expected_results_file_ = expected_results_file
self.failures_markup_file_ = failures_markup_file
self.dependencies_.extend( [
self.source_
, self.expected_results_file_
, self.failures_markup_file_
]
)
self.relevant_paths_.extend( [ self.source_ ] )
self.boost_paths_.extend( [ self.expected_results_file_, self.failures_markup_file_ ] )
def update( self ):
def filter_xml( src, dest ):
class xmlgen( xml.sax.saxutils.XMLGenerator ):
def __init__( self, writer ):
xml.sax.saxutils.XMLGenerator.__init__( self, writer )
self.trimmed = 0
self.character_content = ""
def startElement( self, name, attrs):
self.flush()
xml.sax.saxutils.XMLGenerator.startElement( self, name, attrs )
def endElement( self, name ):
self.flush()
xml.sax.saxutils.XMLGenerator.endElement( self, name )
def flush( self ):
content = self.character_content
self.character_content = ""
self.trimmed = 0
xml.sax.saxutils.XMLGenerator.characters( self, content )
def characters( self, content ):
if not self.trimmed:
max_size = pow( 2, 16 )
self.character_content += content
if len( self.character_content ) > max_size:
self.character_content = self.character_content[ : max_size ] + "...\n\n[The content has been trimmed by the report system because it exceeds %d bytes]" % max_size
self.trimmed = 1
o = open( dest, "w" )
try:
gen = xmlgen( o )
xml.sax.parse( src, gen )
finally:
o.close()
return dest
utils.log( 'Merging "%s" with expected results...' % shorten( self.source_ ) )
try:
trimmed_source = filter_xml( self.source_, '%s-trimmed.xml' % os.path.splitext( self.source_ )[0] )
utils.libxslt(
utils.log
, trimmed_source
, xsl_path( 'add_expected_results.xsl' )
, self.file_path_
, {
"expected_results_file" : self.expected_results_file_
, "failures_markup_file": self.failures_markup_file_
, "source" : self.tag_
}
)
os.unlink( trimmed_source )
except Exception, msg:
utils.log( ' Skipping "%s" due to errors (%s)' % ( self.source_, msg ) )
if os.path.exists( self.file_path_ ):
os.unlink( self.file_path_ )
def _xml_timestamp( xml_path ):
class timestamp_reader( xml.sax.handler.ContentHandler ):
def startElement( self, name, attrs ):
if name == 'test-run':
self.timestamp = attrs.getValue( 'timestamp' )
raise self
try:
xml.sax.parse( xml_path, timestamp_reader() )
raise Exception( 'Cannot extract timestamp from "%s". Invalid XML file format?' % xml_path )
except timestamp_reader, x:
return x.timestamp
class make_links_action( action ):
def __init__( self, source, destination, output_dir, tag, run_date, comment_file, failures_markup_file ):
action.__init__( self, destination )
self.dependencies_.append( source )
self.source_ = source
self.output_dir_ = output_dir
self.tag_ = tag
self.run_date_ = run_date
self.comment_file_ = comment_file
self.failures_markup_file_ = failures_markup_file
self.links_file_path_ = os.path.join( output_dir, 'links.html' )
def update( self ):
utils.makedirs( os.path.join( os.path.dirname( self.links_file_path_ ), "output" ) )
utils.makedirs( os.path.join( os.path.dirname( self.links_file_path_ ), "developer", "output" ) )
utils.makedirs( os.path.join( os.path.dirname( self.links_file_path_ ), "user", "output" ) )
utils.log( ' Making test output files...' )
try:
utils.libxslt(
utils.log
, self.source_
, xsl_path( 'links_page.xsl' )
, self.links_file_path_
, {
'source': self.tag_
, 'run_date': self.run_date_
, 'comment_file': self.comment_file_
, 'explicit_markup_file': self.failures_markup_file_
}
)
except Exception, msg:
utils.log( ' Skipping "%s" due to errors (%s)' % ( self.source_, msg ) )
open( self.file_path_, "w" ).close()
class unzip_action( action ):
def __init__( self, source, destination, unzip_func ):
action.__init__( self, destination )
self.dependencies_.append( source )
self.source_ = source
self.unzip_func_ = unzip_func
def update( self ):
try:
utils.log( ' Unzipping "%s" ... into "%s"' % ( shorten( self.source_ ), os.path.dirname( self.file_path_ ) ) )
self.unzip_func_( self.source_, os.path.dirname( self.file_path_ ) )
except Exception, msg:
utils.log( ' Skipping "%s" due to errors (%s)' % ( self.source_, msg ) )
def ftp_task( site, site_path , destination ):
__log__ = 1
utils.log( '' )
utils.log( 'ftp_task: "ftp://%s/%s" -> %s' % ( site, site_path, destination ) )
utils.log( ' logging on ftp site %s' % site )
f = ftplib.FTP( site )
f.login()
utils.log( ' cwd to "%s"' % site_path )
f.cwd( site_path )
source_content = list_ftp( f )
source_content = [ x for x in source_content if re.match( r'.+[.](?<!log[.])zip', x.name ) and x.name.lower() != 'boostbook.zip' ]
destination_content = list_dir( destination )
d = diff( source_content, destination_content )
def synchronize():
for source in d[0]:
utils.log( 'Copying "%s"' % source )
result = open( os.path.join( destination, source ), 'wb' )
f.retrbinary( 'RETR %s' % source, result.write )
result.close()
mod_date = find_by_name( source_content, source ).date
m = mkgmtime( mod_date )
os.utime( os.path.join( destination, source ), ( m, m ) )
for obsolete in d[1]:
utils.log( 'Deleting "%s"' % obsolete )
os.unlink( os.path.join( destination, obsolete ) )
utils.log( " Synchronizing..." )
__log__ = 2
synchronize()
f.quit()
def unzip_archives_task( source_dir, processed_dir, unzip_func ):
utils.log( '' )
utils.log( 'unzip_archives_task: unpacking updated archives in "%s" into "%s"...' % ( source_dir, processed_dir ) )
__log__ = 1
target_files = [ os.path.join( processed_dir, os.path.basename( x.replace( ".zip", ".xml" ) ) ) for x in glob.glob( os.path.join( source_dir, "*.zip" ) ) ] + glob.glob( os.path.join( processed_dir, "*.xml" ) )
actions = [ unzip_action( os.path.join( source_dir, os.path.basename( x.replace( ".xml", ".zip" ) ) ), x, unzip_func ) for x in target_files ]
for a in actions:
a.run()
def merge_xmls_task( source_dir, processed_dir, merged_dir, expected_results_file, failures_markup_file, tag ):
utils.log( '' )
utils.log( 'merge_xmls_task: merging updated XMLs in "%s"...' % source_dir )
__log__ = 1
utils.makedirs( merged_dir )
target_files = [ os.path.join( merged_dir, os.path.basename( x ) ) for x in glob.glob( os.path.join( processed_dir, "*.xml" ) ) ] + glob.glob( os.path.join( merged_dir, "*.xml" ) )
actions = [ merge_xml_action( os.path.join( processed_dir, os.path.basename( x ) )
, x
, expected_results_file
, failures_markup_file
, tag ) for x in target_files ]
for a in actions:
a.run()
def make_links_task( input_dir, output_dir, tag, run_date, comment_file, extended_test_results, failures_markup_file ):
utils.log( '' )
utils.log( 'make_links_task: make output files for test results in "%s"...' % input_dir )
__log__ = 1
target_files = [ x + ".links" for x in glob.glob( os.path.join( input_dir, "*.xml" ) ) ] + glob.glob( os.path.join( input_dir, "*.links" ) )
actions = [ make_links_action( x.replace( ".links", "" )
, x
, output_dir
, tag
, run_date
, comment_file
, failures_markup_file
) for x in target_files ]
for a in actions:
a.run()
class xmlgen( xml.sax.saxutils.XMLGenerator ):
document_started = 0
def startDocument( self ):
if not self.document_started:
xml.sax.saxutils.XMLGenerator.startDocument( self )
self.document_started = 1
def merge_processed_test_runs( test_runs_dir, tag, writer ):
utils.log( '' )
utils.log( 'merge_processed_test_runs: merging processed test runs from %s into a single XML...' % test_runs_dir )
__log__ = 1
all_runs_xml = xmlgen( writer, encoding='utf-8' )
all_runs_xml.startDocument()
all_runs_xml.startElement( 'all-test-runs', {} )
files = glob.glob( os.path.join( test_runs_dir, '*.xml' ) )
for test_run in files:
#file_pos = writer.stream.tell()
file_pos = writer.tell()
try:
utils.log( ' Writing "%s" into the resulting XML...' % test_run )
xml.sax.parse( test_run, all_runs_xml )
except Exception, msg:
utils.log( ' Skipping "%s" due to errors (%s)' % ( test_run, msg ) )
#writer.stream.seek( file_pos )
#writer.stream.truncate()
writer.seek( file_pos )
writer.truncate()
all_runs_xml.endElement( 'all-test-runs' )
all_runs_xml.endDocument()
def execute_tasks(
tag
, user
, run_date
, comment_file
, results_dir
, output_dir
, reports
, warnings
, extended_test_results
, dont_collect_logs
, expected_results_file
, failures_markup_file
, report_executable
):
incoming_dir = os.path.join( results_dir, 'incoming', tag )
processed_dir = os.path.join( incoming_dir, 'processed' )
merged_dir = os.path.join( processed_dir, 'merged' )
if not os.path.exists( incoming_dir ):
os.makedirs( incoming_dir )
if not os.path.exists( processed_dir ):
os.makedirs( processed_dir )
if not os.path.exists( merged_dir ):
os.makedirs( merged_dir )
if not dont_collect_logs:
ftp_site = 'boost.cowic.de'
site_path = '/boost/do-not-publish-this-url/results/%s' % tag
ftp_task( ftp_site, site_path, incoming_dir )
unzip_archives_task( incoming_dir, processed_dir, utils.unzip )
if report_executable:
if not os.path.exists( merged_dir ):
os.makedirs( merged_dir )
command_line = report_executable
command_line += " --expected " + '"%s"' % expected_results_file
command_line += " --markup " + '"%s"' % failures_markup_file
command_line += " --comment " + '"%s"' % comment_file
command_line += " --tag " + tag
# command_line += " --run-date " + '"%s"' % run_date
command_line += " -rl"
for r in reports:
command_line += ' -r' + r
command_line += " --css " + xsl_path( 'html/master.css' )
for f in glob.glob( os.path.join( processed_dir, '*.xml' ) ):
command_line += ' "%s"' % f
utils.log("Producing the reports...")
utils.log("> "+command_line)
os.system(command_line)
return
merge_xmls_task( incoming_dir, processed_dir, merged_dir, expected_results_file, failures_markup_file, tag )
make_links_task( merged_dir
, output_dir
, tag
, run_date
, comment_file
, extended_test_results
, failures_markup_file )
results_xml_path = os.path.join( output_dir, 'extended_test_results.xml' )
#writer = codecs.open( results_xml_path, 'w', 'utf-8' )
writer = open( results_xml_path, 'w' )
merge_processed_test_runs( merged_dir, tag, writer )
writer.close()
make_result_pages(
extended_test_results
, expected_results_file
, failures_markup_file
, tag
, run_date
, comment_file
, output_dir
, reports
, warnings
)
def make_result_pages(
extended_test_results
, expected_results_file
, failures_markup_file
, tag
, run_date
, comment_file
, output_dir
, reports
, warnings
):
utils.log( 'Producing the reports...' )
__log__ = 1
warnings_text = '+'.join( warnings )
if comment_file != '':
comment_file = os.path.abspath( comment_file )
links = os.path.join( output_dir, 'links.html' )
utils.makedirs( os.path.join( output_dir, 'output' ) )
for mode in ( 'developer', 'user' ):
utils.makedirs( os.path.join( output_dir, mode , 'output' ) )
issues = os.path.join( output_dir, 'developer', 'issues.html' )
if 'i' in reports:
utils.log( ' Making issues list...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'issues_page.xsl' )
, issues
, {
'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'expected_results_file': expected_results_file
, 'explicit_markup_file': failures_markup_file
, 'release': "yes"
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 'd' in reports:
utils.log( ' Making detailed %s report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'result_page.xsl' )
, os.path.join( output_dir, mode, 'index.html' )
, {
'links_file': 'links.html'
, 'mode': mode
, 'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'expected_results_file': expected_results_file
, 'explicit_markup_file' : failures_markup_file
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 's' in reports:
utils.log( ' Making summary %s report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'summary_page.xsl' )
, os.path.join( output_dir, mode, 'summary.html' )
, {
'mode' : mode
, 'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'explicit_markup_file' : failures_markup_file
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 'dr' in reports:
utils.log( ' Making detailed %s release report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'result_page.xsl' )
, os.path.join( output_dir, mode, 'index_release.html' )
, {
'links_file': 'links.html'
, 'mode': mode
, 'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'expected_results_file': expected_results_file
, 'explicit_markup_file' : failures_markup_file
, 'release': "yes"
}
)
for mode in ( 'developer', 'user' ):
if mode[0] + 'sr' in reports:
utils.log( ' Making summary %s release report...' % mode )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'summary_page.xsl' )
, os.path.join( output_dir, mode, 'summary_release.html' )
, {
'mode' : mode
, 'source': tag
, 'run_date': run_date
, 'warnings': warnings_text
, 'comment_file': comment_file
, 'explicit_markup_file' : failures_markup_file
, 'release': 'yes'
}
)
if 'e' in reports:
utils.log( ' Generating expected_results ...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'produce_expected_results.xsl' )
, os.path.join( output_dir, 'expected_results.xml' )
)
if 'n' in reports:
utils.log( ' Making runner comment files...' )
utils.libxslt(
utils.log
, extended_test_results
, xsl_path( 'runners.xsl' )
, os.path.join( output_dir, 'runners.html' )
)
shutil.copyfile(
xsl_path( 'html/master.css' )
, os.path.join( output_dir, 'master.css' )
)
fix_file_names( output_dir )
def fix_file_names( dir ):
"""
The current version of xsltproc doesn't correctly handle
spaces. We have to manually go through the
result set and decode encoded spaces (%20).
"""
utils.log( 'Fixing encoded file names...' )
for root, dirs, files in os.walk( dir ):
for file in files:
if file.find( "%20" ) > -1:
new_name = file.replace( "%20", " " )
utils.rename(
utils.log
, os.path.join( root, file )
, os.path.join( root, new_name )
)
def build_xsl_reports(
locate_root_dir
, tag
, expected_results_file
, failures_markup_file
, comment_file
, results_dir
, result_file_prefix
, dont_collect_logs = 0
, reports = report_types
, report_executable = None
, warnings = []
, user = None
, upload = False
):
( run_date ) = time.strftime( '%Y-%m-%dT%H:%M:%SZ', time.gmtime() )
root_paths.append( locate_root_dir )
root_paths.append( results_dir )
bin_boost_dir = os.path.join( locate_root_dir, 'bin', 'boost' )
output_dir = os.path.join( results_dir, result_file_prefix )
utils.makedirs( output_dir )
if expected_results_file != '':
expected_results_file = os.path.abspath( expected_results_file )
else:
expected_results_file = os.path.abspath( map_path( 'empty_expected_results.xml' ) )
extended_test_results = os.path.join( output_dir, 'extended_test_results.xml' )
execute_tasks(
tag
, user
, run_date
, comment_file
, results_dir
, output_dir
, reports
, warnings
, extended_test_results
, dont_collect_logs
, expected_results_file
, failures_markup_file
, report_executable
)
if upload:
upload_dir = 'regression-logs/'
utils.log( 'Uploading results into "%s" [connecting as %s]...' % ( upload_dir, user ) )
archive_name = '%s.tar.gz' % result_file_prefix
utils.tar(
os.path.join( results_dir, result_file_prefix )
, archive_name
)
utils.sourceforge.upload( os.path.join( results_dir, archive_name ), upload_dir, user )
utils.sourceforge.untar( os.path.join( upload_dir, archive_name ), user, background = True )
def accept_args( args ):
args_spec = [
'locate-root='
, 'tag='
, 'expected-results='
, 'failures-markup='
, 'comment='
, 'results-dir='
, 'results-prefix='
, 'dont-collect-logs'
, 'reports='
, 'boost-report='
, 'user='
, 'upload'
, 'help'
]
options = {
'--comment': ''
, '--expected-results': ''
, '--failures-markup': ''
, '--reports': string.join( report_types, ',' )
, '--boost-report': None
, '--tag': None
, '--user': None
, 'upload': False
}
utils.accept_args( args_spec, args, options, usage )
if not options.has_key( '--results-dir' ):
options[ '--results-dir' ] = options[ '--locate-root' ]
if not options.has_key( '--results-prefix' ):
options[ '--results-prefix' ] = 'all'
return (
options[ '--locate-root' ]
, options[ '--tag' ]
, options[ '--expected-results' ]
, options[ '--failures-markup' ]
, options[ '--comment' ]
, options[ '--results-dir' ]
, options[ '--results-prefix' ]
, options.has_key( '--dont-collect-logs' )
, options[ '--reports' ].split( ',' )
, options[ '--boost-report' ]
, options[ '--user' ]
, options.has_key( '--upload' )
)
def usage():
print 'Usage: %s [options]' % os.path.basename( sys.argv[0] )
print '''
\t--locate-root the same as --locate-root in compiler_status
\t--tag the tag for the results (i.e. 'trunk')
\t--expected-results the file with the results to be compared with
\t the current run
\t--failures-markup the file with the failures markup
\t--comment an html comment file (will be inserted in the reports)
\t--results-dir the directory containing -links.html, -fail.html
\t files produced by compiler_status (by default the
\t same as specified in --locate-root)
\t--results-prefix the prefix of -links.html, -fail.html
\t files produced by compiler_status
\t--user SourceForge user name for a shell account
\t--upload upload reports to SourceForge
The following options are useful in debugging:
\t--dont-collect-logs don't collect the test logs
\t--reports produce only the specified reports
\t us - user summary
\t ds - developer summary
\t ud - user detailed
\t dd - developer detailed
\t l - links
\t p - patches
\t x - extended results file
\t i - issues
\t n - runner comment files
'''
def main():
build_xsl_reports( *accept_args( sys.argv[ 1 : ] ) )
if __name__ == '__main__':
main()

reports/src/build_results_all.sh (Executable file, 226 lines)

@@ -0,0 +1,226 @@
#!/bin/sh
#~ Copyright Rene Rivera 2014-2015
#~ Distributed under the Boost Software License, Version 1.0.
#~ (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
set -e
log_time()
{
echo `date` "::" $1 >> boost-reports-time.log
}
build_all()
{
log_time "Start of testing. [build_all]"
build_setup
update_tools
build_results develop 2>&1 | tee boost-reports/develop.log
build_results master 2>&1 | tee boost-reports/master.log
upload_results develop
upload_results master
log_time "End of testing. [build_all]"
}
git_update()
{
cwd=`pwd`
if [ -d "${1}" ]; then
cd "${1}"
git remote set-branches --add origin "${2}"
git pull --recurse-submodules
git checkout "${2}"
else
mkdir -p "${1}"
git init "${1}"
cd "${1}"
git remote add --no-tags -t "${2}" origin "${3}"
git fetch --depth=1
git checkout "${2}"
fi
cd "${cwd}"
}
git_submodule_update()
{
cwd=`pwd`
cd "${1}"
git submodule update --init "${2}"
cd "${cwd}"
}
build_setup()
{
log_time "Get tools. [build_setup]"
cwd=`pwd`
mkdir -p boost-reports/develop
mkdir -p boost-reports/master
log_time "Git; boost_root [build_setup]"
git_update "${cwd}/boost-reports/boost_root" master 'https://github.com/boostorg/boost.git'
git_submodule_update "${cwd}/boost-reports/boost_root" libs/algorithm
git_submodule_update "${cwd}/boost-reports/boost_root" libs/any
git_submodule_update "${cwd}/boost-reports/boost_root" libs/array
git_submodule_update "${cwd}/boost-reports/boost_root" libs/assert
git_submodule_update "${cwd}/boost-reports/boost_root" libs/bind
git_submodule_update "${cwd}/boost-reports/boost_root" libs/concept_check
git_submodule_update "${cwd}/boost-reports/boost_root" libs/config
git_submodule_update "${cwd}/boost-reports/boost_root" libs/container
git_submodule_update "${cwd}/boost-reports/boost_root" libs/core
git_submodule_update "${cwd}/boost-reports/boost_root" libs/crc
git_submodule_update "${cwd}/boost-reports/boost_root" libs/date_time
git_submodule_update "${cwd}/boost-reports/boost_root" libs/detail
git_submodule_update "${cwd}/boost-reports/boost_root" libs/exception
git_submodule_update "${cwd}/boost-reports/boost_root" libs/filesystem
git_submodule_update "${cwd}/boost-reports/boost_root" libs/foreach
git_submodule_update "${cwd}/boost-reports/boost_root" libs/format
git_submodule_update "${cwd}/boost-reports/boost_root" libs/function
git_submodule_update "${cwd}/boost-reports/boost_root" libs/functional
git_submodule_update "${cwd}/boost-reports/boost_root" libs/integer
git_submodule_update "${cwd}/boost-reports/boost_root" libs/io
git_submodule_update "${cwd}/boost-reports/boost_root" libs/iostreams
git_submodule_update "${cwd}/boost-reports/boost_root" libs/iterator
git_submodule_update "${cwd}/boost-reports/boost_root" libs/lexical_cast
git_submodule_update "${cwd}/boost-reports/boost_root" libs/math
git_submodule_update "${cwd}/boost-reports/boost_root" libs/move
git_submodule_update "${cwd}/boost-reports/boost_root" libs/mpl
git_submodule_update "${cwd}/boost-reports/boost_root" libs/numeric/conversion
git_submodule_update "${cwd}/boost-reports/boost_root" libs/optional
git_submodule_update "${cwd}/boost-reports/boost_root" libs/predef
git_submodule_update "${cwd}/boost-reports/boost_root" libs/preprocessor
git_submodule_update "${cwd}/boost-reports/boost_root" libs/property_tree
git_submodule_update "${cwd}/boost-reports/boost_root" libs/program_options
git_submodule_update "${cwd}/boost-reports/boost_root" libs/range
git_submodule_update "${cwd}/boost-reports/boost_root" libs/regex
git_submodule_update "${cwd}/boost-reports/boost_root" libs/smart_ptr
git_submodule_update "${cwd}/boost-reports/boost_root" libs/static_assert
git_submodule_update "${cwd}/boost-reports/boost_root" libs/system
git_submodule_update "${cwd}/boost-reports/boost_root" libs/throw_exception
git_submodule_update "${cwd}/boost-reports/boost_root" libs/tokenizer
git_submodule_update "${cwd}/boost-reports/boost_root" libs/tuple
git_submodule_update "${cwd}/boost-reports/boost_root" libs/type_index
git_submodule_update "${cwd}/boost-reports/boost_root" libs/type_traits
git_submodule_update "${cwd}/boost-reports/boost_root" libs/unordered
git_submodule_update "${cwd}/boost-reports/boost_root" libs/utility
git_submodule_update "${cwd}/boost-reports/boost_root" libs/variant
git_submodule_update "${cwd}/boost-reports/boost_root" libs/wave
git_submodule_update "${cwd}/boost-reports/boost_root" tools/inspect
log_time "Git; boost_regression [build_setup]"
git_update "${cwd}/boost-reports/boost_regression" develop 'https://github.com/boostorg/regression.git'
log_time "Git; boost_bb [build_setup]"
git_update "${cwd}/boost-reports/boost_bb" develop 'https://github.com/boostorg/build.git'
cd "${cwd}"
}
update_tools()
{
log_time "Build tools. [update_tools]"
cwd=`pwd`
cd "${cwd}/boost-reports/boost_bb"
./bootstrap.sh
cd "${cwd}/boost-reports/boost_regression/reports/build"
"${cwd}/boost-reports/boost_bb/b2" \
"--boost-build=${cwd}/boost-reports/boost_bb/src" \
"--boost-root=${cwd}/boost-reports/boost_root" install
cd "${cwd}"
}
report_info()
{
cat - > comment.html <<HTML
<table style="border-spacing: 0.5em;">
<tr>
<td style="vertical-align: top;"><tt>uname</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`uname -a`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>uptime</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`uptime`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;"><tt>python</tt></td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`python --version 2>&1`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;">previous run</td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`cat previous.txt`
</pre>
</td>
</tr>
<tr>
<td style="vertical-align: top;">current run</td>
<td>
<pre style="border: 1px solid #666; overflow: auto;">
`date -u`
</pre>
</td>
</tr>
</table>
HTML
date -u > previous.txt
}
build_results()
{
tag="${1?'error: command line missing branch-name argument'}"
log_time "Build results for branch ${tag}. [build_results]"
reports="dd,ds,i,n"
cwd=`pwd`
cd boost-reports
cd "${1}"
root=`pwd`
boost=${cwd}/boost-reports/boost_root
report_info
python "${cwd}/boost-reports/boost_regression/reports/src/boost_wide_report.py" \
--locate-root="${root}" \
--tag=${tag} \
--expected-results="${boost}/status/expected_results.xml" \
--failures-markup="${boost}/status/explicit-failures-markup.xml" \
--comment="comment.html" \
--user="" \
--reports=${reports} \
"--boost-report=${cwd}/boost-reports/boost_regression/reports/build/bin/boost_report"
cd "${cwd}"
}
upload_results()
{
log_time "Upload results for branch $1. [upload_results]"
cwd=`pwd`
cd boost-reports
upload_dir=/home/grafik/www.boost.org/testing
if [ -f ${1}/report.zip ]; then
mv ${1}/report.zip ${1}.zip
else
cd ${1}/all
rm -f ../../${1}.zip*
#~ zip -q -r -9 ../../${1} * -x '*.xml'
7za a -tzip -mx=9 ../../${1}.zip * '-x!*.xml'
cd "${cwd}"
fi
mv ${1}.zip ${1}.zip.uploading
rsync -vuz --rsh=ssh --stats \
${1}.zip.uploading grafik@beta.boost.org:/${upload_dir}/incoming/
ssh grafik@beta.boost.org \
cp --no-preserve=timestamps ${upload_dir}/incoming/${1}.zip.uploading ${upload_dir}/live/${1}.zip
mv ${1}.zip.uploading ${1}.zip
cd "${cwd}"
}
echo "=====-----=====-----=====-----=====-----=====-----=====-----=====-----" >> boost-reports-time.log
build_all 2>&1 | tee boost-reports.log

reports/src/common.cpp (Normal file, 671 lines)

@@ -0,0 +1,671 @@
// Copyright MetaCommunications, Inc. 2003-2005.
// Copyright Steven Watanabe 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "common.hpp"
#include "xml.hpp"
#include "html.hpp"
#include <boost/regex.hpp>
#include <boost/foreach.hpp>
#include <boost/algorithm/string/split.hpp>
#include <boost/algorithm/string/classification.hpp>
#include <boost/date_time/posix_time/posix_time_io.hpp>
#include <algorithm>
#include <set>
using namespace boost::regression;
std::string boost::regression::alternate_mode(const std::string& mode) {
if(mode == "user") {
return "developer";
} else {
return "user";
}
}
std::string boost::regression::release_postfix(bool is_release) {
if(is_release) {
return "_release";
} else {
return "";
}
}
// safe
void boost::regression::get_libraries(const test_structure_t& test_structure, std::set<std::string>& out) {
typedef boost::unordered_map<std::string, test_structure_t::platform_t>::const_reference outer_elem;
BOOST_FOREACH(outer_elem platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
BOOST_FOREACH(test_structure_t::toolset_t::const_reference library, toolset.second) {
out.insert(library.first);
}
}
}
}
}
#if 0
<func:function name="meta:test_case_status">
<xsl:param name="explicit_markup"/>
<xsl:param name="test_log"/>
<xsl:variable name="status">
<xsl:choose>
<xsl:when test="meta:is_unusable( $explicit_markup, $test_log/@library, $test_log/@toolset )">
<xsl:text>unusable</xsl:text>
</xsl:when>
<xsl:when test="$test_log/@result='fail' and $test_log/@status='unexpected' and $test_log/@is-new='no'">
<xsl:text>fail-unexpected</xsl:text>
</xsl:when>
<xsl:when test="$test_log/@result='fail' and $test_log/@status='unexpected' and $test_log/@is-new='yes'">
<xsl:text>fail-unexpected-new</xsl:text>
</xsl:when>
<xsl:when test="$test_log/@result='success' and $test_log/@status='unexpected'">
<xsl:text>success-unexpected</xsl:text>
</xsl:when>
<xsl:when test="$test_log/@status='expected'">
<xsl:text>expected</xsl:text>
</xsl:when>
<xsl:otherwise>
<xsl:text>other</xsl:text>
</xsl:otherwise>
</xsl:choose>
</xsl:variable>
<func:result select="$status"/>
</func:function>
<func:function name="meta:is_toolset_required">
<xsl:param name="toolset"/>
<func:result select="count( $explicit_markup/explicit-failures-markup/mark-toolset[ @name = $toolset and @status='required' ] ) > 0"/>
</func:function>
#endif
bool boost::regression::is_library_beta(const failures_markup_t& explicit_markup, const std::string& library) {
boost::unordered_map<std::string, node_ptr>::const_iterator pos = explicit_markup.libraries.find(library);
if(pos != explicit_markup.libraries.end()) {
return check_attr(pos->second, "status", "beta");
}
return false;
}
bool boost::regression::is_test_log_a_test_case(const test_structure_t::test_log_t& test_log) {
const std::string& type = test_log.test_type;
return type == "compile" || type == "compile_fail" || type == "link" || type == "link_fail" ||
type == "run" || type == "run_fail" || type == "run_pyd" || type == "run_mpi";
}
// Does not assume any constraints on contents of the strings
bool boost::regression::is_unusable(const failures_markup_t& markup, const std::string& library, const std::string& toolset) {
boost::unordered_map<std::string, node_ptr>::const_iterator pos = markup.libraries.find(library);
if(pos != markup.libraries.end()) {
FOR_EACH_ELEMENT(mark_unusable, pos->second) {
if(check_name(mark_unusable, "mark-unusable")) {
FOR_EACH_ELEMENT(toolset_node, mark_unusable) {
std::string name;
if(lookup_attr(toolset_node, "name", name) && re_match(name, toolset)) {
return true;
}
}
}
}
}
return false;
}
void boost::regression::get_unusable(const failures_markup_t& markup,
const std::string& library,
const test_structure_t& test_structure,
boost::unordered_map<std::string, std::size_t>& out,
std::vector<node_ptr>& notes) {
boost::unordered_map<std::string, node_ptr>::const_iterator pos = markup.libraries.find(library);
if(pos != markup.libraries.end()) {
FOR_EACH_ELEMENT(mark_unusable, pos->second) {
if(check_name(mark_unusable, "mark-unusable")) {
node_ptr note = 0;
std::vector<std::string> toolsets;
FOR_EACH_ELEMENT(toolset_node, mark_unusable) {
std::string name;
if(check_name(toolset_node, "toolset") && lookup_attr(toolset_node, "name", name)) {
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
if(re_match(name, toolset.first)) {
toolsets.push_back(toolset.first);
}
}
}
}
} else if(check_name(toolset_node, "note")) {
note = toolset_node;
}
}
if(note != 0 && !toolsets.empty()) {
notes.push_back(note);
BOOST_FOREACH(const std::string& toolset, toolsets) {
out[toolset] = notes.size();
}
}
}
}
}
}
// There are no restrictions on the pattern or the
// string. The only special character in the pattern
// is '*', which matches any number of consecutive characters.
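// Illustrative examples of the matching rules above (not part of the original
// source):
//   re_match("gcc-*", "gcc-4.9.1")     -> true   (prefix "gcc-" matches, '*' eats the rest)
//   re_match("*-darwin", "gcc-darwin") -> true   (tail "-darwin" matches)
//   re_match("msvc", "gcc")            -> false  (no '*', so an exact match is required)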
bool boost::regression::re_match(const std::string& pattern, const std::string& text) {
std::size_t pattern_start = 0;
std::size_t pattern_end = 0;
std::size_t text_start = 0;
// check that the leading portion of the string matches
std::size_t first = pattern.find('*');
if(first == std::string::npos) return pattern == text;
if(pattern.substr(0, first) != text.substr(0, first)) return false;
text_start = first;
pattern_start = pattern_end = first + 1;
for(; pattern_end != pattern.size(); ++pattern_end) {
// split into blocks at '*'
if(pattern[pattern_end] == '*') {
// and search for each block
std::size_t size = pattern_end - pattern_start;
std::size_t off = text.find(pattern.data() + pattern_start, text_start, size);
// if not found, the pattern doesn't match
if(off == std::string::npos) return false;
text_start = off + size;
pattern_start = pattern_end + 1; // skip past the '*'
}
}
// check that the tails of the strings are the same
std::size_t tail_size = pattern_end - pattern_start;
return tail_size <= text.size() - text_start &&
pattern.substr(pattern_start, tail_size) == text.substr(text.size() - tail_size, tail_size);
}
// date-time
// The result is clamped to the range [0,30]
int boost::regression::timestamp_difference(const boost::posix_time::ptime& x, const boost::posix_time::ptime& y) {
boost::posix_time::time_duration diff = y - x;
int result = diff.hours() / 24;
if(result < 0) return 0;
else if(result > 30) return 30;
else return result;
}
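// For illustration (not part of the original source): a difference of 5 days
// yields 5, anything older than 30 days yields 30, and a negative difference
// (y before x) yields 0.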
std::string boost::regression::format_timestamp(const boost::posix_time::ptime& timestamp) {
std::ostringstream stream;
stream.imbue(std::locale(std::locale::classic(), new boost::posix_time::time_facet("%a, %d %b %Y %H:%M:%S +0000")));
stream << timestamp;
return stream.str();
}
// path
// FIXME: The result MUST be a valid filesystem path.
std::string boost::regression::encode_path(const std::string& path) {
std::string result;
BOOST_FOREACH(char ch, path) {
if(ch == '.' || ch == '/') {
ch = '-';
}
// FIXME: allow only characters from the following set:
// "[a-z][A-Z][0-9][-+_. ,()$!~?]...
result += ch;
}
return result;
}
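// For illustration (not part of the original source):
//   encode_path("gcc-4.9/libs/regex") -> "gcc-4-9-libs-regex"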
std::string boost::regression::escape_uri(const std::string& path) {
std::string result;
BOOST_FOREACH(char ch, path) {
if (('a' <= ch && ch <= 'z') ||
('A' <= ch && ch <= 'Z') ||
('0' <= ch && ch <= '9') ||
ch == '-' || ch == '_' || ch == '~' || ch == '.' ||
// We're intentionally allowing '/' to go through.
// to escape it as well, use escape_literal_uri
ch == '/' ||
// FIXME: reserved characters
ch == '+')
result += ch;
else {
// Cast through unsigned char so bytes >= 0x80 don't sign-extend.
unsigned digit = static_cast<unsigned char>(ch);
const char * xdigits = "0123456789ABCDEF";
result += '%';
result += xdigits[digit >> 4];
result += xdigits[digit & 0xF];
}
}
return result;
}
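// Illustrative example (added for clarity; not part of the original sources):
//   escape_uri("developer/some lib.html") -> "developer/some%20lib.html"
// The space is percent-encoded; '/' and '.' pass through unchanged.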
std::string boost::regression::escape_literal_uri(const std::string& path) {
std::string result;
BOOST_FOREACH(char ch, path) {
// FIXME: Assumes UTF-8
if (('a' <= ch && ch <= 'z') ||
('A' <= ch && ch <= 'Z') ||
('0' <= ch && ch <= '9') ||
ch == '-' || ch == '_' || ch == '~' || ch == '.')
result += ch;
else {
// Cast through unsigned char so bytes >= 0x80 don't sign-extend.
unsigned digit = static_cast<unsigned char>(ch);
const char * xdigits = "0123456789ABCDEF";
result += '%';
result += xdigits[digit >> 4];
result += xdigits[digit & 0xF];
}
}
return result;
}
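// Illustrative example (added for clarity; not part of the original sources):
//   escape_literal_uri("developer/some lib.html") -> "developer%2Fsome%20lib.html"
// Unlike escape_uri, '/' (and '+') are percent-encoded as well.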
// okay
std::string boost::regression::output_file_path(const std::string& path) {
return("output/" + (encode_path(path) + ".html"));
}
// okay
std::string boost::regression::log_file_path(
const failures_markup_t& explicit_markup,
const test_structure_t::test_log_t& test_log,
const std::string& runner,
const std::string& release_postfix)
{
if(show_output(explicit_markup, test_log)) {
return output_file_path(runner + "-" + test_log.target_directory + release_postfix);
} else {
return "";
}
}
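// Illustrative example (added for clarity; not part of the original sources;
// the runner and target-directory names are hypothetical):
//   output_file_path("runner1-boost/bin.v2/some_lib.test/gcc/debug")
//     -> "output/runner1-boost-bin-v2-some_lib-test-gcc-debug.html"
// log_file_path() builds the same kind of path from runner + "-" + target_directory
// + release_postfix, or returns "" when show_output() says the log is not shown.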
bool boost::regression::show_library(const failures_markup_t& explicit_markup, const std::string& library, bool release) {
return !release || !is_library_beta(explicit_markup, library);
}
bool boost::regression::show_output(const failures_markup_t& explicit_markup, const test_structure_t::test_log_t& test_log) {
return ((!test_log.result || test_log.show_run_output) ||
(test_log.result && !test_log.status))
&& !(is_unusable(explicit_markup, test_log.library, test_log.toolset));
}
bool boost::regression::show_toolset(const failures_markup_t& explicit_markup, const std::string& toolset, bool release) {
return !release || explicit_markup.required_toolsets.find(toolset) != explicit_markup.required_toolsets.end();
}
// safe: no assumptions, enumerated result
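// Cell classes are assigned with the following precedence (first match wins):
// unusable, missing, fail-unexpected, fail-unexpected-new,
// fail-expected-unresearched, fail-expected, success-unexpected,
// success-expected, unknown.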
std::string boost::regression::result_cell_class(const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_log_group_t& test_logs) {
if(is_unusable(explicit_markup, library, toolset)) {
return "unusable";
}
if(test_logs.empty()) {
return "missing";
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->expected_result && !log->is_new) {
return "fail-unexpected";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->expected_result && log->is_new) {
return "fail-unexpected-new";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->expected_reason != "") {
return "fail-expected-unreasearched";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result) {
return "fail-expected";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(log->result && !log->expected_result) {
return "success-unexpected";
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(log->result && log->expected_result) {
return "success-expected";
}
}
return "unknown";
}
// safe
std::string boost::regression::result_cell_class(const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_structure_t::library_t& test_logs)
{
test_log_group_t tmp;
BOOST_FOREACH(test_structure_t::library_t::const_reference test_case, test_logs) {
BOOST_FOREACH(test_structure_t::test_case_t::const_reference log, test_case.second) {
tmp.push_back(&log);
}
}
return result_cell_class(explicit_markup, library, toolset, tmp);
}
// requires: purpose must be well-formed html
void boost::regression::insert_report_header(
html_writer& document,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose)
{
document << "<div class=\"report-info\">\n";
document << " <div>\n";
document << " <b>Report Time: </b> " << format_timestamp(run_date) << "\n";
document << " </div>\n";
if(!purpose.empty()) {
document << " <div>\n";
document << " <b>Purpose: </b> " << purpose << "\n";
document << " </div>\n";
}
BOOST_FOREACH(const std::string& warning, warnings) {
document << " <div class=\"report-warning\">\n";
document << " <b>Warning: </b>\n";
document << " <a href=\"mailto:boost-testing@lists.boost.org?subject=[Report Pages]%20" << escape_literal_uri(warning) << " (" << format_timestamp(run_date) << ")\" class=\"warning-link\">\n";
document << " " << escape_xml(warning) << "\n";
document << " </a>\n";
document << " </div>\n";
}
document << "</div>\n";
}
// requires class_ is enumerated
void boost::regression::insert_view_link(html_writer& out, const std::string& page, const std::string& class_, bool release) {
if(release) {
out << "<a href=\"" << escape_uri(page) << ".html\" class=\"" << class_ << "\" target=\"_top\">"
"Full View"
"</a>\n";
} else {
out << "<a href=\"" << escape_uri(page) << "_release.html\" class=\"" << class_ << "\" target=\"_top\">"
"Release View"
"</a>";
}
}
// requires: mode = developer | user (Should be the opposite of the current page)
// requires: page is the base name of the current page. It should be valid
// according to encode_path, but should not be URI escaped.
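// Illustrative output (added for clarity; not part of the original sources):
// insert_page_links(doc, "issues", /*release=*/false, "user") emits a
// "Release View" link to issues_release.html, a "user View" link to
// ../user/issues.html and a "Legend" link to issues_.html#legend.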
void boost::regression::insert_page_links(html_writer& document,
const std::string& page,
bool release,
const std::string& mode) {
document << "<div class=\"links\">\n";
// yes, really. The class is set to ""
insert_view_link(document, page, "", release);
std::string release_postfix(release? "_release" : "");
document << "&#160;|&#160;"
"<a href=\"../" << mode << "/" << escape_uri(page) << release_postfix << ".html\" class=\"view-link\" target=\"_top\">"
<< mode << " View"
"</a>"
"&#160;|&#160;"
"<a href=\"" << escape_uri(page) << release_postfix << "_.html#legend\">Legend</a>\n"
"</div>\n";
}
// requires: mode = summary | details
// requires: top_or_bottom = top | bottom
void boost::regression::insert_runners_rows(html_writer& document,
const std::string& mode,
const std::string& top_or_bottom,
const test_structure_t& test_structure,
const boost::posix_time::ptime& run_date) {
std::string colspan = (mode == "summary") ? "1" : "2";
if(top_or_bottom == "top") {
document << "<tr>\n"
" <td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
std::size_t count = 0;
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
count += run.toolsets.size();
}
if(count > 0) {
document << " <td colspan=\"" << count << "\" class=\"runner\">\n"
" " << escape_xml(platform.first) << "\n"
" </td>\n";
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
}
document << "<tr>\n"
" <td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
if(run.toolsets.size() > 0) {
document << " <td colspan=\"" << run.toolsets.size() << "\" class=\"runner\">\n"
" <a href=\"../" << escape_uri(encode_path(run.runner)) << ".html\">\n"
" " << escape_xml(run.runner) << "\n"
" </a>\n"
" </td>\n";
}
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
document << "<tr>\n"
"<td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
if(run.toolsets.size() > 0) {
document << " <td colspan=\"" << run.toolsets.size() << "\" class=\"revision\">\n"
" rev " << run.revision.substr(0, 6) << "\n"
" </td>\n";
}
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
document << "<tr>\n"
" <td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
if(run.toolsets.size() > 0) {
int age = timestamp_difference(run.timestamp, run_date);
document << " <td colspan=\"" << run.toolsets.size() << "\" class=\"timestamp\">\n"
" <span class=\"timestamp-" << age << "\">" << format_timestamp(run.timestamp) << "</span>";
if(run.run_type != "full") {
document << "<span class=\"run-type-" << run.run_type << "\">" << run.run_type[0] << "</span>\n";
}
document << " </td>\n";
}
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
if(top_or_bottom == "bottom") {
document << "<tr>\n"
" <td colspan=\"" << colspan << "\">&#160;</td>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
std::size_t count = 0;
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
count += run.toolsets.size();
}
if(count > 0) {
document << " <td colspan=\"" << count << "\" class=\"runner\">\n"
" " << escape_xml(platform.first) << "\n"
" </td>\n";
}
}
document << " <td colspan=\"" << colspan << "\">&#160;</td>\n"
"</tr>\n";
}
}
// requires mode = summary | details
void boost::regression::insert_toolsets_row(html_writer& document,
const test_structure_t& test_structure,
const failures_markup_t& explicit_markup,
const std::string& mode,
const boost::posix_time::ptime& run_date,
const std::string& library,
const boost::unordered_map<std::string, std::size_t>& library_marks) {
document << "<tr valign=\"middle\">\n";
std::string colspan = (mode == "summary") ? "1" : "2";
std::string title = (mode == "summary") ?
"&#160;library&#160;/&#160;toolset&#160;" :
"&#160;test&#160;/&#160;toolset&#160;";
document << " <td class=\"head\" colspan=\"" << colspan << "\" width=\"1%\">" << title << "</td>\n";
BOOST_FOREACH(const test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(const test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(const test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
std::string name = toolset.first;
std::string class_ = (explicit_markup.required_toolsets.find(name) != explicit_markup.required_toolsets.end())?
"required-toolset-name" :
"toolset-name";
document << "<td class=\"" << class_ << "\">\n";
int age = timestamp_difference(run.timestamp, run_date);
document << "<span class=\"timestamp-" << age << "\">\n";
// break toolset names into words
BOOST_FOREACH(char ch, name) {
document << ch;
if(ch == '-') {
document << ' ';
}
}
if(mode == "details") {
// <!-- prepare toolset notes -->
std::set<std::size_t> toolset_notes;
typedef boost::unordered_map<std::string, std::size_t>::const_reference ref_type;
BOOST_FOREACH(ref_type toolset_markup, library_marks.equal_range(name)) {
toolset_notes.insert(toolset_markup.second);
}
if(!toolset_notes.empty()) {
document << "<span class=\"super\">\n";
bool first = true;
BOOST_FOREACH(std::size_t note_index, toolset_notes) {
if(!first) document << ", "; else first = false;
document << "<a href=\"#" << escape_uri(library) << "-note-" << note_index << "\" title=\"Note " << note_index << "\">\n"
" " << note_index << "\n"
"</a>\n";
}
document << "</span>\n";
}
}
document << "</span>\n"
"</td>\n";
}
}
}
document << "<td class=\"head\" width=\"1%\">" << title << "</td>\n"
"</tr>\n";
}
namespace {
std::string get_note_attr(const test_structure_t::note_t& note, const std::string& name) {
if(const node_ptr* node = boost::get<node_ptr>(&note)) {
std::string result;
lookup_attr(*node, name, result);
return result;
} else {
return std::string();
}
}
}
// requires: if note is a string, it is well-formed html
void boost::regression::show_note(
html_writer& document,
const test_structure_t::note_t& note,
const std::string& references,
const failures_markup_t& explicit_markup)
{
document << "<div class=\"note\">\n";
std::string author = get_note_attr(note, "author");
std::string date = get_note_attr(note, "date");
document << " <span class=\"note-header\">\n";
if(author != "" && date != "") {
document << " [&#160;" << escape_xml(author) << "&#160;" << escape_xml(date) << "&#160;]\n";
} else if(author != "") {
document << " [&#160;" << escape_xml(author) << "&#160;]\n";
} else if(date != "") {
document << " [&#160;" << escape_xml(date) << "&#160;]\n";
}
document << " </span>\n";
if(references != "") {
// lookup references (refid="17,18")
std::vector<std::string> refs;
boost::algorithm::split(refs, references, boost::is_any_of(","));
BOOST_FOREACH(const std::string& refid, refs) {
boost::unordered_map<std::string, node_ptr>::const_iterator pos = explicit_markup.notes.find(refid);
if(pos != explicit_markup.notes.end()) {
write_contents(document, pos->second);
} else {
document << " " << escape_xml(refid) << "\n";
}
}
}
if(const node_ptr* node_note = boost::get<node_ptr>(&note)) {
write_contents(document, *node_note);
} else if(const std::string* string_note = boost::get<std::string>(&note)) {
document << *string_note; // not escaped--can contain html markup
}
document << "</div>\n";
}
// requires: any note that is a string contains well-formed html
void boost::regression::show_notes(html_writer& document,
const std::vector<test_structure_t::note_t>& notes,
const failures_markup_t& explicit_markup)
{
document << "<div class=\"notes\">\n";
BOOST_FOREACH(const test_structure_t::note_t& note, notes) {
document << " <div>\n";
std::string refid = get_note_attr(note, "refid");
::show_note(document, note, refid, explicit_markup);
document << " </div>\n";
}
document << "</div>\n";
}

109
reports/src/common.hpp Normal file
View File

@@ -0,0 +1,109 @@
// common.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef COMMON_HPP_INCLUDED
#define COMMON_HPP_INCLUDED
#include <vector>
#include <string>
#include <set>
#include <boost/filesystem/path.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include "xml.hpp"
namespace boost {
namespace regression {
class html_writer;
typedef std::vector<const test_structure_t::test_log_t*> test_log_group_t;
bool is_library_beta(const failures_markup_t& explicit_markup, const std::string& library);
bool is_test_log_a_test_case(const test_structure_t::test_log_t& test_log);
bool is_unusable(const failures_markup_t& markup, const std::string& library, const std::string& toolset);
void get_unusable(const failures_markup_t& markup,
const std::string& library,
const test_structure_t& test_structure,
boost::unordered_map<std::string, std::size_t>& out,
std::vector<node_ptr>& notes);
bool re_match(const std::string& pattern, const std::string& text);
int timestamp_difference(const boost::posix_time::ptime& x, const boost::posix_time::ptime& y);
std::string format_timestamp(const boost::posix_time::ptime& timestamp);
std::string encode_path(const std::string& path);
std::string escape_uri(const std::string& path); // escapes a URI path (leaves '/' alone)
std::string escape_literal_uri(const std::string& path); // escapes all special characters in a URI
std::string output_file_path(const std::string& path);
std::string log_file_path(
const failures_markup_t& explicit_markup,
const test_structure_t::test_log_t& test_log,
const std::string& runner,
const std::string& release_postfix = "");
bool show_library(const failures_markup_t& explicit_markup, const std::string& library, bool release);
bool show_output(const failures_markup_t& markup, const test_structure_t::test_log_t& test_log);
bool show_toolset(const failures_markup_t& explicit_markup, const std::string& toolset, bool release);
void insert_report_header(html_writer& document,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose = "");
void insert_view_link(html_writer& document,
const std::string& page,
const std::string& class_,
bool release);
void insert_page_links(html_writer& document,
const std::string& page,
bool release,
const std::string& mode);
void insert_runners_rows(html_writer& document,
const std::string& mode,
const std::string& top_or_bottom,
const test_structure_t& test_structure,
const boost::posix_time::ptime& run_date);
void insert_toolsets_row(html_writer& document,
const test_structure_t& test_structure,
const failures_markup_t& explicit_markup,
const std::string& mode,
const boost::posix_time::ptime& run_date,
const std::string& library = std::string(),
const boost::unordered_map<std::string, std::size_t>& library_marks = boost::unordered_map<std::string, std::size_t>());
void show_note(
html_writer& document,
const test_structure_t::note_t& note,
const std::string& references,
const failures_markup_t& explicit_markup);
void show_notes(html_writer& document, const std::vector<test_structure_t::note_t>& notes, const failures_markup_t& explicit_markup);
std::string result_cell_class(const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_log_group_t& test_logs);
std::string result_cell_class(const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_structure_t::library_t& test_logs);
std::string alternate_mode(const std::string& mode);
std::string release_postfix(bool is_release);
void get_libraries(const test_structure_t& test_structure, std::set<std::string>& out);
}
}
#endif

330
reports/src/html.cpp Normal file
View File

@@ -0,0 +1,330 @@
// html.cpp
//
// Copyright (c) 2010
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "html.hpp"
const char* const boost::regression::issues_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
"<tr>\n"
" <td>\n"
" <table border=\"0\" summary=\"legend\">\n"
" <tr>\n"
" <td>\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected-new\">&lt;toolset&gt;</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-item\">Failure on a newly added test/compiler.</td>\n"
" </tr>\n"
" <tr>\n"
" <td>\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected\">&lt;toolset&gt;</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-item\">Unexpected failure.</td>\n"
" </tr>\n"
" </table>\n"
" </td>\n"
"</tr>\n"
"</table>\n"
"</div>\n"
;
const char* const boost::regression::library_user_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"success legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-success-expected user-library-success-expected\">pass</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">The test successfully passes.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"expected fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-expected user-library-fail-expected\"><u>fail*</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" A <b>known failure</b> that the library maintainers are aware about. Please follow the link to \n"
" find out how it affects the library's functionality.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unusable legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-unusable user-library-unusable\">unusable</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" The library author marked it as <b>unusable</b> on this particular platform/toolset. Please\n"
" see the corresponding footnote.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unresearched legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-expected-unresearched user-library-fail-expected-unresearched\"><u>fail?</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" An <b>unsearched failure</b>: the library maintainers are aware of it, but need help with \n"
" investigating/addressing it for future releases. Please follow the link to \n"
" access the details and find out how it affects library functionality. </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected-new user-library-fail-unexpected-new\"><u>fail</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" A <b>new failure</b> on the test/compiler added in this release that hasn't been accounted for yet. \n"
" Please follow the link to access the details.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"library-row-single user-library-row-single\"><td class=\"library-fail-unexpected\"><u>fail</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" A <b>regression</b> comparing to the previous release. Please follow the link to \n"
" access the details.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" </td>\n"
" <td class=\"legend-explanation\"></td>\n"
" </tr>\n"
"</table>\n"
"<hr/>\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td><span class=\"run-type-incremental\">i</span></td>\n"
" <td class=\"legend-explanation\">An incremental run.</td>\n"
" </tr>\n"
"</table>\n"
"</div>\n"
;
const char* const boost::regression::library_developer_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"success legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-success-expected\">pass</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Success.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected pass legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-success-unexpected\">pass</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Unexpected success; follow the link for more details.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"expected fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-expected\">fail*</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Expected failure; follow the link for more details.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unusable legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-unusable\">n/a</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">The library author marked it as unusable on this particular platform/toolset.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unresearched legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-expected-unresearched\">fail?</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Unsearched failure; follow the link for more details.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected-new\">fail</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Failure on a newly added test/compiler.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"library-row-single\"><td class=\"library-fail-unexpected\">fail</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">Unexpected failure/regression.</td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" </td>\n"
" <td class=\"legend-explanation\"></td>\n"
" </tr>\n"
"</table>\n"
"<hr/>\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td><span class=\"run-type-incremental\">i</span></td>\n"
" <td class=\"legend-explanation\">An incremental run.</td>\n"
" </tr>\n"
"</table>\n"
"</div>\n"
;
const char * const boost::regression::summary_user_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"success legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-success-expected user-summary-success-expected\">&#160;pass&#160;</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" All library's tests pass.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"expected fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-expected user-summary-fail-expected\"><u>details</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" Most of the library's tests pass, but there are some <b>known failures</b> which might affect the library's\n"
" functionality. Please follow the link to see the detailed report.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-unexpected-new user-summary-fail-unexpected-new\"><u>details</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" Some of the <b>newly added</b> library's tests fail, or some of the library's tests fail on\n"
" the <b>newly added compiler</b>, or some of the tests fail due to <b>unresearched \n"
" reasons</b>. Please follow the link to see the detailed report.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-unexpected user-summary-fail-unexpected\"><u>regress.</u></td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" There are some <b>regressions</b> in the library comparing to the previous release. \n"
" Please follow the link to see the detailed report.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unusable legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-unusable user-summary-unusable\">unusable</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" The library author marked it as <b>unusable</b> on the particular platform/toolset.\n"
" Please follow the link to see the detailed report.\n"
" </td>\n"
" </tr>\n"
"</table>\n"
"<hr/>\n"
"<table border=\"0\" summary=\"report description\" id=\"Table1\">\n"
" <tr>\n"
" <td><span class=\"run-type-incremental\">i</span></td>\n"
" <td class=\"legend-explanation\">An incremental run.</td>\n"
" </tr>\n"
"</table>\n"
"</div>\n"
;
const char * const boost::regression::summary_developer_legend =
"<div class=\"legend\">\n"
"<table border=\"0\" summary=\"report description\">\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"success legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-success-expected\">OK</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" All expected tests pass.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected pass legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-success-unexpected\">OK</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" All expected tests pass, and some other tests that were expected to fail \n"
" unexpectedly pass as well.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected new fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-unexpected-new\">fail</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" There are some failures on the newly added tests/compiler(s).\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unexpected fail legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-fail-unexpected\">broken</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" Tests that the library author expects to pass are currently failing.\n"
" </td>\n"
" </tr>\n"
" <tr>\n"
" <td class=\"legend-item\">\n"
" <table width=\"100%\" summary=\"unusable legend\">\n"
" <tr class=\"summary-row-single\"><td class=\"summary-unusable\">n/a</td></tr>\n"
" </table>\n"
" </td>\n"
" <td class=\"legend-explanation\">\n"
" The library author marked it as unusable on particular platform/toolset.\n"
" </td>\n"
" </tr>\n"
"</table>\n"
"<hr/>\n"
"<table border=\"0\" summary=\"report description\" id=\"Table1\">\n"
" <tr>\n"
" <td><span class=\"run-type-incremental\">i</span></td>\n"
" <td class=\"legend-explanation\">An incremental run.</td>\n"
" </tr>\n"
"</table>\n"
"</div>\n"
;

25
reports/src/html.hpp Normal file
View File

@@ -0,0 +1,25 @@
// html.hpp
//
// Copyright (c) 2010
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef HTML_HPP_INCLUDED
#define HTML_HPP_INCLUDED
namespace boost {
namespace regression {
extern const char* const issues_legend;
extern const char* const library_user_legend;
extern const char* const library_developer_legend;
extern const char* const summary_user_legend;
extern const char* const summary_developer_legend;
}
}
#endif

70
reports/src/html_writer.hpp Normal file
View File

@@ -0,0 +1,70 @@
// html_writer.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanyiong file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef HTML_WRITER_HPP_INCLUDED
#define HTML_WRITER_HPP_INCLUDED
#include <boost/filesystem/path.hpp>
#include <boost/filesystem/convenience.hpp>
#include <boost/iostreams/device/file_descriptor.hpp>
#include <boost/noncopyable.hpp>
#include <boost/shared_ptr.hpp>
#include "zip.hpp"
#include <cstring>
extern boost::shared_ptr<boost::zip::zip_archive> global_zip;
namespace boost {
namespace regression {
class html_writer : boost::noncopyable {
public:
// path must be UTF-8 encoded. The separator is '/'
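// Illustrative usage (added for clarity; not part of the original sources;
// assumes global_zip has already been set up by the caller):
//   html_writer out("developer/index.html");
//   out << "<p>" << 42 << "</p>\n";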
explicit html_writer(const std::string& path)
: sink(*global_zip, path)
{}
~html_writer() {
}
html_writer& operator<<(const std::string& arg) {
sink.write(arg.data(), arg.size());
return *this;
}
html_writer& operator<<(const char* arg) {
sink.write(arg, ::std::strlen(arg));
return *this;
}
html_writer& operator<<(char arg) {
sink.write(&arg, 1);
return *this;
}
html_writer& operator<<(std::size_t arg) {
char buf[30];
char* pos = &buf[0] + 30;
if(arg == 0) {
*--pos = '0';
}
for(; arg > 0; arg /= 10) {
*--pos = static_cast<char>('0' + (arg % 10));
}
sink.write(pos, buf + 30 - pos);
return *this;
}
html_writer& operator<<(int arg) {
if(arg < 0) *this << '-' << std::size_t(-arg);
else *this << std::size_t(arg);
return *this;
}
private:
boost::zip::nocompression_sink sink;
};
}
}
#endif

294
reports/src/issues_page.cpp Normal file
View File

@@ -0,0 +1,294 @@
// issues_page.cpp
//
// Copyright MetaCommunications, Inc. 2003-2004.
// Copyright Steven Watanabe 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "issues_page.hpp"
#include "html_writer.hpp"
#include "xml.hpp"
#include "html.hpp"
#include "common.hpp"
#include <map>
#include <string>
#include <boost/foreach.hpp>
#include <boost/filesystem/fstream.hpp>
#include <boost/date_time/posix_time/posix_time_io.hpp>
using namespace boost::regression;
typedef std::pair<const test_structure_t::test_log_t*, const std::string*> test_failure_t;
typedef std::map<std::string, std::map<std::string, std::vector<test_failure_t> > > library_test_names_t;
typedef std::map<std::string, library_test_names_t> libraries_t;
namespace {
void failing_tests(const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
libraries_t& out)
{
typedef boost::unordered_map<std::string, test_structure_t::platform_t> test_structure_top;
BOOST_FOREACH(test_structure_top::const_reference platform, tests.platforms) {
BOOST_FOREACH(const test_structure_t::run_t& runs, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, runs.toolsets) {
BOOST_FOREACH(test_structure_t::toolset_t::const_reference library, toolset.second) {
BOOST_FOREACH(test_structure_t::library_t::const_reference test_case, library.second) {
BOOST_FOREACH(test_structure_t::test_case_t::const_reference test_log, test_case.second) {
if(test_log.status == false && test_log.result == false &&
explicit_markup.required_toolsets.find(test_log.toolset) != explicit_markup.required_toolsets.end() &&
is_test_log_a_test_case(test_log) &&
show_library(explicit_markup, library.first, release) &&
show_toolset(explicit_markup, toolset.first, release) &&
!(is_unusable(explicit_markup, library.first, toolset.first))) {
out[library.first][test_log.test_name][test_log.toolset].push_back(std::make_pair(&test_log, &runs.runner));
}
}
}
}
}
}
}
}
std::size_t count_failures(const library_test_names_t& library) {
std::size_t result = 0;
BOOST_FOREACH(library_test_names_t::const_reference test, library) {
BOOST_FOREACH(library_test_names_t::mapped_type::const_reference toolset, test.second) {
result += toolset.second.size();
}
}
return result;
}
// okay
void write_issues_list_reference_file(const std::string& out,
const std::string& source,
bool release,
const std::string& issues_list)
{
html_writer document(out);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n";
document << "<html>\n";
document << " <head>\n";
document << " <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n";
document << " <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n";
document << " <title>Boost regression unresolved issues: " << source << "</title>\n";
document << " </head>\n";
document << " <frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n";
document << " <frame name=\"tocframe\" src=\"toc" << release_postfix(release) << ".html\" scrolling=\"auto\"/>\n";
document << " <frame name=\"docframe\" src=\"" << issues_list << "\" scrolling=\"auto\"/>\n";
document << " </frameset>\n";
document << "</html>\n";
}
void print_failure_cell(html_writer& document,
const failures_markup_t& explicit_markup,
const std::string& output_directory,
const test_structure_t::test_log_t& test_log,
const std::string& toolset,
const std::string& runner,
const std::string& release_postfix)
{
std::string log_link = log_file_path(explicit_markup, test_log, runner, release_postfix);
const char* class_ = test_log.is_new?
"library-fail-unexpected-new" :
"library-fail-unexpected";
document << "<td class=\"" << class_ << "\">\n";
document << " <span>\n";
document << " <a href=\"" << escape_uri(log_link) << "\" class=\"log-link\" target=\"_top\">\n";
document << " " << escape_xml(toolset) << "\n";
document << " </a>\n";
document << " </span>\n";
document << "</td>\n";
}
void write_issues_list(const std::string& path,
const failures_markup_t& explicit_markup,
const std::string& output_directory,
const libraries_t& libraries,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose,
bool release)
{
//utils::log("Writing document " + path);
const char* release_postfix = release? "_release" : "";
html_writer document(path);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n";
document << "<html>\n";
document << " <head>\n";
document << " <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n";
document << " <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n";
document << " <title>Boost regression unresolved issues: " << source << "</title>\n";
document << " </head>\n";
document << " <body>\n";
document << "\n";
document << " <h1 class=\"page-title\">\n";
document << " Unresolved Issues: \n";
document << " <a class=\"hover-link\" href=\"summary" << release_postfix << ".html\" target=\"_top\">" << source << "</a>\n";
document << " </h1>\n";
document << "\n";
insert_report_header(document, run_date, warnings, purpose);
// Emit the index
document << " <h2>Libraries with unresolved failures</h2>\n";
document << " <div align=\"center\">\n";
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
document << " <a href=\"#" << escape_uri(library.first) << "\">\n";
document << " " << escape_xml(library.first) << "\n";
document << " </a>\n";
}
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
std::string library_page(encode_path(library.first));
const library_test_names_t& library_test_names(library.second);
std::size_t failures = count_failures(library.second);
document << " <h2>\n";
document << " <a name=\"" << escape_uri(library.first) << "\"/>\n";
document << " <a class=\"hover-link\" href=\"" << escape_uri(library_page)
<< release_postfix << ".html\" target=\"_top\">\n";
document << " " << escape_xml(library.first) << " (" << failures
<< " failure" << (failures == 1? "":"s") << ")\n";
document << " </a>\n";
document << " </h2>\n";
document << " <table class=\"library-issues-table\" summary=\"issues\">\n";
document << " <thead>\n";
document << " <tr valign=\"middle\">\n";
document << " <td class=\"head\">test</td>\n";
document << " <td class=\"head\">failures</td>\n";
document << " </tr>\n";
document << " </thead>\n";
document << " <tfoot>\n";
document << " <tr valign=\"middle\">\n";
document << " <td class=\"head\">test</td>\n";
document << " <td class=\"head\">failures</td>\n";
document << " </tr>\n";
document << " </tfoot>\n";
document << " <tbody>\n";
BOOST_FOREACH(library_test_names_t::const_reference test, library_test_names) {
const std::string& test_name = test.first;
const std::string& test_program = test.second.begin()->second.front().first->test_program;
document << " <tr>\n";
document << " <td class=\"test-name\">\n";
document << " <a href=\"https://github.com/boostorg/boost/blob/"
<< source << "/" << escape_uri(test_program) << "\" class=\"test-link\" target=\"_top\">\n";
document << " " << escape_xml(test_name) << "\n";
document << " </a>\n";
document << " </td>\n";
document << " <td class=\"failures-row\">\n";
document << " <table summary=\"unexpected fail legend\" class=\"issue-box\">\n";
document << " <tr class=\"library-row-single\">\n";
typedef library_test_names_t::mapped_type::const_reference toolset_t;
BOOST_FOREACH(toolset_t toolset, test.second) {
BOOST_FOREACH(const test_failure_t& failure, toolset.second) {
print_failure_cell(document, explicit_markup, output_directory, *failure.first, toolset.first, *failure.second, release_postfix);
}
}
document << " </tr>\n";
document << " </table>\n";
document << " </td>\n";
document << " </tr>\n";
}
document << " </tbody>\n";
document << " </table>\n";
}
document << " </div>\n";
document << issues_legend;
document << " </body>\n";
document << "</html>\n";
}
// okay
void write_issues_email(const std::string& path,
const boost::posix_time::ptime& run_date,
const std::string& source,
const libraries_t& libraries)
{
boost::filesystem::ofstream document(path);
std::cout << "Writing document " << path << std::endl;
//utils::log(boost::format("Writing document %s") % path);
std::size_t failing_tests = 0;
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
failing_tests += count_failures(library.second);
}
document << "Boost regression test failures\n"
"------------------------------\n"
"Report time: " << run_date << "\n"
"\n"
"This report lists all regression test failures on release platforms.\n"
"\n"
"Detailed report: \n"
" http://beta.boost.org/development/tests/" << source << "/developer/issues.html\n"
"\n"
<< failing_tests << " failure" << (failing_tests == 1? "" : "s")
<< " in " << libraries.size() << " librar" << (libraries.size() == 1? "y" : "ies") << ":\n";
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
document << " " << library.first << " (" << count_failures(library.second) << ")\n";
}
BOOST_FOREACH(libraries_t::const_reference library, libraries) {
std::string library_page = encode_path(library.first);
document << "\n"
"|" << library.first << "|\n";
BOOST_FOREACH(libraries_t::mapped_type::const_reference test_name, library.second) {
document << " " << test_name.first << ":";
BOOST_FOREACH(libraries_t::mapped_type::mapped_type::const_reference toolset, test_name.second) {
document << " " << toolset.first;
}
document << "\n";
}
}
}
}
void boost::regression::issues_list(const std::string& output_dir,
const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose)
{
::libraries_t libraries;
failing_tests(tests, explicit_markup, release, libraries);
std::string release_postfix_(release_postfix(release));
std::string issues_list("issues" + release_postfix_ + "_.html");
write_issues_list_reference_file(output_dir + "/issues.html", source, release, issues_list);
write_issues_list(output_dir + "/" + issues_list, explicit_markup, output_dir, libraries, source, run_date, warnings, purpose, release);
write_issues_email(output_dir + "/issues-email.txt",
run_date,
source,
libraries);
}

31
reports/src/issues_page.hpp Normal file
View File

@@ -0,0 +1,31 @@
// issues_page.hpp
//
// Copyright (c) 2010
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "xml.hpp"
#include <boost/filesystem/path.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <string>
#include <vector>
namespace boost {
namespace regression {
void issues_list(const std::string& output_dir,
const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& purpose);
}
}
#endif

369
reports/src/links_page.cpp Normal file
View File

@@ -0,0 +1,369 @@
// Copyright MetaCommunications, Inc. 2003-2006.
// Copyright Steven Watanabe 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "links_page.hpp"
#include "xml.hpp"
#include "common.hpp"
#include "html_writer.hpp"
#include "html.hpp"
#include <boost/date_time/posix_time/ptime.hpp>
#include <boost/filesystem/path.hpp>
#include <boost/foreach.hpp>
#include <string>
#include <vector>
using namespace boost::regression;
namespace {
void links_page(const failures_markup_t& explicit_markup,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp,
const std::string& library_name,
const std::string& toolset_name,
const std::string& test_name,
const std::vector<test_structure_t::test_log_t>& test_logs);
void write_variants_reference_file(const std::string& path,
const std::string& variants_file_path,
const std::string release_postfix,
const std::vector<test_structure_t::test_log_t>& test_logs,
const std::string& runner_id);
std::string output_page_header(node_ptr test_log, const std::string& runner_id);
void write_variants_file(const failures_markup_t& explicit_markup,
const std::string& path,
const std::vector<test_structure_t::test_log_t>& test_logs,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp);
void write_test_result_file(const failures_markup_t& explicit_markup,
const std::string& path,
const test_structure_t::test_log_t& test_log,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp);
void write_test_results_reference_file(const std::string& path,
const std::string& log_file_path,
const test_structure_t::test_log_t& test_log,
const std::string& runner_id);
// requires: revision must be a SVN revision. i.e. of the form nnnnn
void links_page(const failures_markup_t& explicit_markup,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp,
const std::string& library_name,
const std::string& toolset_name,
const std::string& test_name,
const std::vector<test_structure_t::test_log_t>& test_logs) {
//utils::log("Processing test \"" + runner_id + "/" + library_name + "/" + test_name + "/" + toolset_name + "\"");
const char* postfixes[] = {"", "_release"};
const char* dirs[] = { "developer", "user" };
if(test_logs.size() > 1) {
// utils::log(" Processing variants");
std::string variants_file_path = output_file_path(runner_id + "-" + library_name + "-" + toolset_name + "-" + test_name + "-variants");
write_variants_file(explicit_markup, variants_file_path, test_logs, runner_id, revision, timestamp);
BOOST_FOREACH(const std::string& release_postfix, postfixes) {
BOOST_FOREACH(const std::string& directory, dirs) {
std::string variants__file_path = directory + "/" + (encode_path(runner_id + "-" + library_name + "-" + toolset_name + "-" + test_name + "-variants_" + release_postfix) + ".html");
write_variants_reference_file(variants__file_path, "../" + variants_file_path, release_postfix, test_logs, runner_id);
}
}
}
BOOST_FOREACH(const test_structure_t::test_log_t& test_log, test_logs) {
//utils::log(" Processing test-log");
if(show_output(explicit_markup, test_log)) {
std::string log_path = log_file_path(explicit_markup, test_log, runner_id);
write_test_result_file(explicit_markup, log_path, test_log, runner_id, revision, timestamp);
BOOST_FOREACH(const std::string& release_postfix, postfixes) {
BOOST_FOREACH(const std::string& directory, dirs) {
std::string reference_file_path = directory + "/" + log_file_path(explicit_markup, test_log, runner_id, release_postfix);
write_test_results_reference_file(reference_file_path, log_path, test_log, runner_id);
}
}
}
}
}
// okay. result is unconstrained
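// Illustrative examples (added for clarity; not part of the original sources;
// the runner/library/toolset names are hypothetical):
//   with a test name:  "runner1 - regex - regex_test / gcc-4.9"
//   without one:       the target_directory is used verbatim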
std::string output_page_header(const test_structure_t::test_log_t& test_log, const std::string& runner_id) {
if(test_log.test_name != "") {
return runner_id + " - " + test_log.library + " - " + test_log.test_name + " / " + test_log.toolset;
} else {
return test_log.target_directory;
}
}
// requires: path must be a valid file path.
// requires: variants_file_path must be the path to the variants file relative to path
void write_variants_reference_file(const std::string& path,
const std::string& variants_file_path,
const std::string release_postfix,
const std::vector<test_structure_t::test_log_t>& test_logs,
const std::string& runner_id)
{
//utils::log(" Writing variants reference file %s" % path);
std::string component = output_page_header(test_logs[0], runner_id);
html_writer document(path);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Test output: " << escape_xml(component) << "</title>\n"
" </head>\n"
" <frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
" <frame name=\"tocframe\" src=\"toc" << release_postfix << ".html\" scrolling=\"auto\"/>\n"
" <frame name=\"docframe\" src=\"" << escape_uri(variants_file_path) << "\" scrolling=\"auto\"/>\n"
" </frameset>\n"
"</html>\n";
}
// requires revision is an SVN revision #
// requires path is a valid path
void write_variants_file(const failures_markup_t& explicit_markup,
const std::string& path,
const std::vector<test_structure_t::test_log_t>& test_logs,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp)
{
//utils::log(" Writing variants file " + path.string());
html_writer document(path);
std::string component = output_page_header(test_logs[0], runner_id);
int age = 0; // timestamp_difference(timestamp, run_date);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Test output: " << escape_xml(component) << "</title>\n"
" </head>\n"
" <body>\n"
" <div class=\"log-test-header\">\n"
" <div class=\"log-test-title\">\n"
" Test output: " << escape_xml(component) << "\n"
" </div>\n"
" <div><span class=\"timestamp-" << age << "\">\n"
" Rev " << revision << " /\n"
" " << format_timestamp(timestamp) << "\n"
" </span></div>\n"
" </div>\n"
"\n"
" <p>Output by test variants:</p>\n"
" <table>\n";
BOOST_FOREACH(const test_structure_t::test_log_t& log, test_logs) {
document << " <tr>\n"
" <td>\n";
std::string log_file = log_file_path(explicit_markup, log, runner_id);
if(!log_file.empty()) {
document << " <a href=\"../" << escape_uri(log_file) << "\">\n"
" " << escape_xml(log.target_directory) << "\n"
" </a>\n";
} else {
document << " " << escape_xml(log.target_directory) << "\n";
}
document << " </td>\n"
" </tr>\n";
}
document << " </table>\n"
" </body>\n"
"</html>\n";
}
// okay
const test_structure_t::target_t* lookup_target(const test_structure_t::test_log_t& test_log, const std::string& name) {
boost::unordered_map<std::string, test_structure_t::target_t>::const_iterator pos = test_log.targets.find(name);
if(pos != test_log.targets.end()) {
return &pos->second;
} else {
return 0;
}
}
// requires: path is a valid path
// requires: revision is an SVN revision
void write_test_result_file(const failures_markup_t& explicit_markup,
const std::string& path,
const test_structure_t::test_log_t& test_log,
const std::string& runner_id,
const std::string& revision,
const boost::posix_time::ptime& timestamp)
{
//utils::log(boost::format(" Writing log file document %s") % path);
html_writer document(path);
std::string component = output_page_header(test_log, runner_id);
int age = 0; // timestamp_difference(timestamp, run_date);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n"
"<html>\n";
document << " <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Test output: " << escape_xml(component) << "</title>\n"
" </head>\n";
document << " <body>\n"
" <div class=\"log-test-header\">\n"
" <div class=\"log-test-title\">\n"
" Test output: " << escape_xml(component) << "\n"
" </div>\n"
" <div><span class=\"timestamp-" << age << "\">\n"
" Rev " << revision << " /\n"
" " << format_timestamp(timestamp) << "\n"
" </span></div>\n"
" </div>\n";
if(!test_log.notes.empty()) {
document << " <div class=\"notes\">\n"
" <div class=\"notes-title\">Notes</div>\n";
show_notes(document, test_log.notes, explicit_markup);
document << " </div>\n";
}
if(const test_structure_t::target_t* compile = lookup_target(test_log, "compile")) {
const char* compile_result = compile->result? "succeed" : "fail";
document << " <div>\n";
document << " <div class=\"log-compiler-output-title\">Compile [" << escape_xml(compile->timestamp) << "]:"
" <span class=\"output-" << compile_result << "\">" << compile_result << "</span></div>\n";
document << " <pre>\n";
write_contents(document, compile->contents, true);
document << " </pre>\n";
document << " </div>\n";
}
if(const test_structure_t::target_t* link = lookup_target(test_log, "link")) {
const char* link_result = link->result? "succeed" : "fail";
document << " <div>\n";
document << " <div class=\"log-linker-output-title\">Link [" << escape_xml(link->timestamp) << "]:"
" <span class=\"output-" << link_result << "\">" << link_result << "</span></div>\n";
document << " <pre>\n";
write_contents(document, link->contents, true);
document << " </pre>\n";
document << " </div>\n";
}
if(const test_structure_t::target_t* lib = lookup_target(test_log, "lib")) {
const char* lib_result = lib->result? "succeed" : "fail";
std::string lib_name(lib->contents->value(), lib->contents->value_size());
document << " <div>\n";
document << " <div class=\"log-linker-output-title\">Lib [" << escape_xml(lib->timestamp) << "]:"
" <span class=\"output-" << lib_result << "\">" << lib_result << "</span></div>\n";
document << " <p>\n";
document << " See <a href=\"" << escape_uri(encode_path(runner_id + "-" + lib_name)) << ".html\">\n";
document << " " << escape_xml(lib_name) << "\n";
document << " </a>\n";
document << " </p>\n";
document << " </div>\n";
}
if(const test_structure_t::target_t* run = lookup_target(test_log, "run")) {
const char* run_result = run->result? "succeed" : "fail";
document << " <div>\n";
document << " <div class=\"log-linker-output-title\">Run [" << escape_xml(run->timestamp) << "]:"
" <span class=\"output-" << run_result << "\">" << run_result << "</span></div>\n";
document << " <pre>\n";
write_contents(document, run->contents, true);
document << " </pre>\n";
document << " </div>\n";
}
document << " </body>\n";
document << "</html>\n";
}
// requires path is a valid path
// requires: log_file_path is the location of the log file relative to path
void write_test_results_reference_file(const std::string& path,
const std::string& log_file_path,
const test_structure_t::test_log_t& test_log,
const std::string& runner_id)
{
std::string component = output_page_header(test_log, runner_id);
html_writer document(path);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Test output: " << escape_xml(component) << "</title>\n"
" </head>\n"
" <frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
" <frame name=\"tocframe\" src=\"../toc.html\" scrolling=\"auto\"/>\n"
" <frame name=\"docframe\" src=\"../../" << escape_uri(log_file_path) << "\" scrolling=\"auto\"/>\n"
" </frameset>\n"
"</html>\n";
}
}
// okay
void boost::regression::links_page(
const failures_markup_t& explicit_markup,
const test_structure_t::run_t& test_run)
{
BOOST_FOREACH(const test_structure_t::toolset_group_t::const_reference toolset, test_run.toolsets) {
BOOST_FOREACH(const test_structure_t::toolset_t::const_reference library, toolset.second) {
BOOST_FOREACH(const test_structure_t::library_t::const_reference test_case, library.second) {
::links_page(explicit_markup,
test_run.runner,
test_run.revision,
test_run.timestamp,
library.first,
toolset.first,
test_case.first,
test_case.second);
}
}
}
BOOST_FOREACH(const test_structure_t::toolset_group_t::const_reference toolset, test_run.non_test_case_targets) {
BOOST_FOREACH(const test_structure_t::toolset_t::const_reference library, toolset.second) {
BOOST_FOREACH(const test_structure_t::library_t::const_reference test_case, library.second) {
::links_page(explicit_markup,
test_run.runner,
test_run.revision,
test_run.timestamp,
library.first,
toolset.first,
test_case.first,
test_case.second);
}
}
}
}

25
reports/src/links_page.hpp Normal file
View File

@@ -0,0 +1,25 @@
// links_page.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef LINKS_PAGE_HPP_INCLUDED
#define LINKS_PAGE_HPP_INCLUDED
#include "xml.hpp"
#include <boost/filesystem/path.hpp>
namespace boost {
namespace regression {
void links_page(
const failures_markup_t& explicit_markup,
const test_structure_t::run_t& test_run);
}
}
#endif

View File

@@ -0,0 +1,28 @@
// Copyright MetaCommunications, Inc. 2003-2005.
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "produce_expected_results.hpp"
#include "html_writer.hpp"
#include <iostream>
void boost::regression::produce_expected_results(const test_structure_t& tests) {
std::cout << "Warning: expected results not implemented" << std::endl;
return;
html_writer document("expected_results.xml");
document << "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n";
document << "<root>\n";
document << " <expected-failures>\n";
#if 0
foreach test-log
<xsl:if test="meta:is_test_log_a_test_case(.)">
<test-result library="{@library}" test-name="{@test-name}" toolset="{@toolset}" result="{@result}" />
</xsl:if>
#endif
document << " </expected-failures>\n";
document << "</root>\n";
}

View File

@@ -0,0 +1,22 @@
// produce_expected_results.hpp
//
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_REGRESSION_PRODUCE_EXPECTED_RESULTS_HPP
#define BOOST_REGRESSION_PRODUCE_EXPECTED_RESULTS_HPP
#include "xml.hpp"
namespace boost {
namespace regression {
void produce_expected_results(const test_structure_t& tests);
}
}
#endif

525
reports/src/result_page.cpp Normal file
View File

@@ -0,0 +1,525 @@
// result_page.cpp
//
// Copyright MetaCommunications, Inc. 2003-2007.
// Copyright Steven Watanabe 2010-2011
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "result_page.hpp"
#include "common.hpp"
#include "html.hpp"
#include "html_writer.hpp"
#include "xml.hpp"
#include <boost/foreach.hpp>
#include <boost/unordered_map.hpp>
#include <boost/next_prior.hpp>
#include <set>
#include <stdexcept>
#include <map>
#include <string>
#include <vector>
#include <utility>
#include <iostream>
#include <fstream>
using namespace boost::regression;
namespace {
// safe: no assumptions, no unconstrained output
void test_type_col(html_writer& document, const std::string& test_type) {
document << "<td class=\"test-type\">\n";
document << " <a href=\"http://www.boost.org/status/compiler_status.html#Understanding\" class=\"legend-link\" target=\"_top\">";
if(test_type == "run_pyd") {
document << "r";
} else if(test_type == "run_mpi") {
document << "r";
} else if(test_type == "run") {
document << "r";
} else if(test_type == "run_fail") {
document << "rf";
} else if(test_type == "compile") {
document << "c";
} else if(test_type == "compile_fail") {
document << "cf";
} else if(test_type == "link") {
document << "l";
} else if(test_type == "link_fail") {
document << "lf";
} else {
throw std::runtime_error("Incorrect test type \"" + test_type + "\"");
}
document << " </a>\n";
document << "</td>\n";
}
// category/name
typedef std::pair<std::string, std::string> test_case_id_t;
// runner/toolset
typedef std::pair<std::string, std::string> test_toolset_id_t;
typedef std::vector<const test_structure_t::test_log_t*> test_log_group_t;
typedef boost::unordered_map<test_toolset_id_t, test_log_group_t> test_logs_by_run_t;
typedef std::map<test_case_id_t, test_logs_by_run_t> test_logs_t;
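    // test_logs_t groups the logs shown on a library page: the outer map is keyed
    // by (category, test name) and stays sorted for display order, the inner hash
    // map is keyed by (runner, toolset) and holds every variant log for that test.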
// requires: result contains no HTML special characters
// requires: log_link must not contain a '/' derived from the input (This won't actually break anything, though)
void insert_cell_link(html_writer& document, const std::string& result, const std::string& log_link) {
if(log_link != "") {
document << "&#160;&#160;"
"<a href=\"" << escape_uri(log_link) << "\" class=\"log-link\" target=\"_top\">"
<< result <<
"</a>"
"&#160;&#160;";
} else {
document << "&#160;&#160;" << result << "&#160;&#160;";
}
}
// requires:
void insert_cell_developer(html_writer& document,
const failures_markup_t& explicit_markup,
bool release,
const std::string& library,
const std::string& test_name,
const std::string& runner,
const std::string& toolset,
const test_log_group_t& test_logs) {
std::string class_ = "library-" + result_cell_class(explicit_markup, library, toolset, test_logs);
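        // Pick the link target for the cell: multiple variants link to a generated
        // "-variants_" comparison page, a single log links straight to its output
        // page, and an empty group gets no link at all.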
std::string cell_link = (test_logs.size() > 1)?
encode_path(runner + "-" + library + "-" + toolset + "-" + test_logs.front()->test_name + "-variants_" + release_postfix(release)) + ".html" :
(test_logs.empty())?
std::string("") :
log_file_path(explicit_markup, *test_logs.front(), runner, release_postfix(release));
document << "<td class=\"" << class_ << "\" title=\"" << escape_xml(test_name) << "/" << escape_xml(toolset) << "\">\n";
if(is_unusable(explicit_markup, library, toolset)) {
insert_cell_link(document, "n/a", cell_link);
} else if(test_logs.empty()) {
document << "&#160;&#160;&#160;&#160;\n";
} else {
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->status) {
insert_cell_link(document, (log->expected_reason != "")? "fail?" : "fail*", cell_link);
goto done;
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && !log->status) {
insert_cell_link(document, "fail", cell_link);
goto done;
}
}
insert_cell_link(document, "pass", cell_link);
}
done:
document << "</td>\n";
}
// requires:
void insert_cell_user(html_writer& document,
const failures_markup_t& explicit_markup,
bool release,
const std::string& library,
const std::string& test_name,
const std::string& runner,
const std::string& toolset,
const test_log_group_t& test_logs) {
std::string class_ = "library-" + result_cell_class(explicit_markup, library, toolset, test_logs);
std::string cell_link = (test_logs.size() > 1)?
encode_path(runner + "-" + library + "-" + toolset + "-" + test_logs.front()->test_name + "-variants_" + release_postfix(release)) + ".html" :
(test_logs.empty())?
std::string("") :
log_file_path(explicit_markup, *test_logs.front(), runner, release_postfix(release));
document << "<td class=\"" << class_ << " user-" << class_ << "\" title=\"" << escape_xml(test_name) << "/" << escape_xml(toolset) << "\">\n";
if(is_unusable(explicit_markup, library, toolset)) {
insert_cell_link(document, "unusable", cell_link);
} else if(test_logs.empty()) {
document << "&#160;&#160;&#160;&#160;\n";
} else {
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && log->status) {
insert_cell_link(document, (log->expected_reason != "")? "fail?" : "fail*", cell_link);
goto done;
}
}
BOOST_FOREACH(test_log_group_t::value_type log, test_logs) {
if(!log->result && !log->status) {
insert_cell_link(document, "fail", cell_link);
goto done;
}
}
insert_cell_link(document, "pass", cell_link);
}
done:
document << "</td>\n";
}
// requires: line_mod should be from an enumerated set
// requires: source is a Git branch name
// requires: mode = developer | user
void insert_test_line(html_writer& document,
const failures_markup_t& explicit_markup,
bool release,
const std::string& library,
test_logs_t::const_reference test_results,
const std::vector<std::pair<std::string, std::string> >& all_toolsets,
const std::string& line_mod,
const std::string& source,
const std::string& mode) {
// This is guaranteed to exist because of the way the nested maps are created
const test_structure_t::test_log_t * first_log = (*test_results.second.begin()).second.front();
std::string test_program(first_log->test_program);
std::string::size_type pos = test_program.find(library);
if (pos != std::string::npos)
test_program.erase(0, pos + library.size());
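        // The remaining path, relative to the library root, is appended to the
        // GitHub blob URL below so the test name links to the test's source file.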
std::string test_header =
"<td class=\"test-name\">\n"
" <a href=\"https://github.com/boostorg/" + library + "/blob/" + source + escape_uri(test_program) + "\" class=\"test-link\" target=\"_top\">\n"
" " + escape_xml(test_results.first.second) + "\n" // FIXME: sanitize test name
" </a>\n"
"</td>\n";
document << "<tr class=\"library-row" << line_mod << "\">\n"
<< test_header;
test_log_group_t empty_test_log;
test_type_col(document, first_log->test_type);
BOOST_FOREACH(const test_toolset_id_t& run, all_toolsets) {
const std::string& toolset = run.second;
const std::string& runner = run.first;
test_logs_by_run_t::const_iterator pos = test_results.second.find(run);
const test_log_group_t* test_result_for_toolset =
(pos != test_results.second.end())?
&pos->second :
&empty_test_log;
if(mode == "user") {
insert_cell_user(document, explicit_markup, release, library, test_results.first.second, runner, toolset, *test_result_for_toolset);
} else {
insert_cell_developer(document, explicit_markup, release, library, test_results.first.second, runner, toolset, *test_result_for_toolset);
}
}
document << test_header
<< "</tr>\n";
}
// requires: source is a Git branch name
// requires: mode = developer | user
void insert_test_section(html_writer& document,
const test_structure_t& test_structure,
const failures_markup_t& explicit_markup,
bool release,
const std::string& library,
const test_logs_t& logs,
const std::vector<std::pair<std::string, std::string> >& all_toolsets,
const std::string& source,
const std::string& mode) {
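        // A category header row must span the whole table: the three fixed columns
        // (test name, test type, trailing test name) plus one column per
        // (runner, toolset) pair across all runs.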
std::size_t category_span = 3;
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, test_structure.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
category_span += (run.toolsets.size());
}
}
for(test_logs_t::const_iterator pos = logs.begin(), end = logs.end(); pos != end; ++pos) {
std::string test_name = pos->first.second;
bool category_start = (pos == logs.begin()) || (pos->first.first != boost::prior(pos)->first.first);
bool category_end = (boost::next(pos) == end) || (pos->first.first != boost::next(pos)->first.first);
std::string line_mod =
(category_start && category_end)? "-single" :
category_start? "-first" :
category_end? "-last" :
"";
if(category_start && pos->first.first != "0") {
document << "<tr>\n"
" <td class=\"library-test-category-header\" colspan=\"" << category_span << "\" align=\"center\">\n"
" " << escape_xml(pos->first.first) << "\n"
" </td>\n"
"</tr>\n";
}
insert_test_line(document, explicit_markup, release, library, *pos, all_toolsets, line_mod, source, mode);
}
}
}
// requires: mode = developer | user
// requires: source is a Git branch name
void boost::regression::result_page(const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& mode,
const boost::filesystem::path& comment_file)
{
// skip debug.xml
std::string index_path("index" + release_postfix(release) + "_.html");
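    // The frameset page written first only hosts the ToC frame and the real
    // content page (index_path), which is generated just below.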
{
std::cout << "Writing document " << "index" << release_postfix(release) << ".html" << std::endl;
html_writer index(mode + "/" + "index" + release_postfix(release) + ".html");
index << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Boost regression: " << source << "</title>\n"
"</head>\n"
"<frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
" <frame name=\"tocframe\" src=\"toc" << release_postfix(release) << ".html\" scrolling=\"auto\"/>\n"
" <frame name=\"docframe\" src=\"" << index_path << "\" scrolling=\"auto\"/>\n"
"</frameset>\n"
"</html>\n";
}
std::cout << "Writing document " << index_path << std::endl;
{
html_writer index(mode + "/" + index_path);
index << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Boost regression: " << source << "</title>\n"
"</head>\n"
"<body>\n"
"\n"
" <img border=\"0\" src=\"http://www.boost.org/boost.png\" width=\"277\" height=\"86\" align=\"right\" alt=\"Boost logo\"></img>\n"
"\n"
" <h1 class=\"page-title\">\n"
<< mode << " report: "
" <a class=\"hover-link\" href=\"summary.html\" target=\"_top\">" << source << "</a>\n"
" </h1>\n"
"\n";
std::string purpose = (mode == "user")?
"The purpose of this report is to help a user to find out whether a particular library "
"works on the particular compiler(s). For SVN \"health report\", see "
" <a href=\"../developer/index.html\" target=\"_top\">developer summary</a>."
:
"Provides Boost developers with visual indication of the SVN \"health\". For user-level "
"report, see <a href=\"../user/index.html\" target=\"_top\">user summary</a>.";
insert_report_header(index, run_date, warnings, purpose);
index << " <div class=\"comment\">\n";
if(comment_file != "") {
std::ifstream comment(comment_file.string().c_str());
if(!comment) {
throw std::ios_base::failure("Could not open file " + comment_file.string());
}
std::string comment_data(std::istreambuf_iterator<char>(comment.rdbuf()), std::istreambuf_iterator<char>());
index << comment_data;
}
index << " </div>\n";
index << "</body>\n";
index << "</html>\n";
}
std::set<std::string> libraries;
get_libraries(tests, libraries);
{
std::string toc_path("toc" + release_postfix(release) + ".html");
std::cout << "Writing document " << toc_path << std::endl;
html_writer toc(mode + "/" + toc_path);
toc << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
<< "<html>\n"
<< "<head>\n"
<< " <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
<< " <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\"/>\n"
<< " <title>Boost regression: " << source << "</title>\n"
<< "</head>\n"
<< "<body class=\"" << mode << "-toc\">\n"
<< " <div class=\"toc-header-entry\">\n"
<< " <a href=\"index" << release_postfix(release) << ".html\" class=\"toc-entry\" target=\"_top\">Report info</a>\n"
<< " </div>\n"
<< " <div class=\"toc-header-entry\">\n"
<< " <a href=\"summary" << release_postfix(release) << ".html\" class=\"toc-entry\" target=\"_top\">Summary</a>\n"
<< " </div>\n";
if(mode == "developer") {
toc << " <div class=\"toc-header-entry\">\n"
" <a href=\"issues.html\" class=\"toc-entry\" target=\"_top\">Unresolved issues</a>\n"
" </div>\n";
}
toc << " <div class=\"toc-header-entry\">\n";
insert_view_link(toc, "index", "toc-entry", release);
toc << " </div>\n";
toc << " <hr/>\n";
BOOST_FOREACH(const std::string& library, libraries) {
std::string library_page(encode_path(library));
toc << " <div class=\"toc-entry\">\n"
" <a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"toc-entry\" target=\"_top\">\n"
" " << escape_xml(library) << "\n"
" </a>\n"
" </div>\n";
}
toc << "</body>\n"
"</html>\n";
}
BOOST_FOREACH(const std::string& library, libraries) {
if(show_library(explicit_markup, library, release)) {
std::string library_results(encode_path(library) + release_postfix(release) + "_.html");
std::string library_page(encode_path(library) + release_postfix(release) + ".html");
std::cout << "Writing document " << library_page << std::endl;
{
html_writer document(mode + "/" + library_page);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\"/>\n"
" <title>Boost regression: " << escape_xml(library) << "/" << source << "</title>\n"
"</head>\n"
"<frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
"<frame name=\"tocframe\" src=\"toc" << release_postfix(release) << ".html\" scrolling=\"auto\"/>\n"
"<frame name=\"docframe\" src=\"" << escape_uri(library_results) << "\" scrolling=\"auto\"/>\n"
"</frameset>\n"
"</html>\n";
}
std::cout << "Writing document " << library_results << std::endl;
{
html_writer document(mode + "/" + library_results);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\" />\n"
" <title>Boost regression: " << escape_xml(library) << "/" << source << "</title>\n"
"</head>\n"
"\n"
"<body>\n";
insert_page_links(document, encode_path(library), release, alternate_mode(mode));
document << "<h1 class=\"page-title\">\n"
" <a class=\"hover-link\" name=\"" << escape_xml(library) << "\" href=\"http://www.boost.org/libs/" << escape_uri(library) << "\" target=\"_top\">"
<< escape_xml(library) <<
"</a>"
"/"
"<a class=\"hover-link\" href=\"summary.html\" target=\"_top\">" << source << "</a>\n"
"</h1>\n";
insert_report_header(document, run_date, warnings);
// toolset/note/index
boost::unordered_map<std::string, std::size_t> library_marks;
std::vector<node_ptr> notes;
get_unusable(explicit_markup, library, tests, library_marks, notes);
document << "<table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" class=\"library-table\" width=\"1%\" summary=\"Library results\">\n"
" <thead>\n";
insert_runners_rows(document, "details", "top", tests, run_date); // okay
insert_toolsets_row(document, tests, explicit_markup, "details", run_date, library, library_marks);
document << " </thead>\n"
" <tfoot>\n";
insert_toolsets_row(document, tests, explicit_markup, "details", run_date, library, library_marks);
insert_runners_rows(document, "details", "bottom", tests, run_date);
document << " </tfoot>\n"
" <tbody>\n";
test_logs_t lib_tests;
std::vector<std::pair<std::string, std::string> > all_toolsets;
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
all_toolsets.push_back(std::make_pair(run.runner, toolset.first));
test_structure_t::toolset_t::const_iterator pos = toolset.second.find(library);
if(pos != toolset.second.end()) {
BOOST_FOREACH(test_structure_t::library_t::const_reference test_case, pos->second) {
test_log_group_t test_logs;
BOOST_FOREACH(test_structure_t::test_case_t::const_reference log, test_case.second) {
if(is_test_log_a_test_case(log)) {
test_logs.push_back(&log);
}
}
if(!test_logs.empty()) {
std::string category = test_logs.front()->category;
lib_tests[std::make_pair(category, test_case.first)][std::make_pair(run.runner, toolset.first)] = test_logs;
}
}
}
}
}
}
insert_test_section(document, tests, explicit_markup, release, library, lib_tests, all_toolsets, source, mode);
document << " </tbody>\n"
"</table>\n";
if(!notes.empty()) {
document << "<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" class=\"library-library-notes\" summary=\"library notes\">\n";
for(std::size_t i = 0; i < notes.size(); ++i) {
document << "<tr class=\"library-library-note\">\n"
" <td valign=\"top\" width=\"3em\">\n"
" <a name=\"" << escape_uri(library) << "-note-" << (i + 1) << "\">\n"
" <span class=\"super\">" << (i + 1) << "</span>\n"
" </a>\n"
" </td>\n"
" <td>\n";
std::string refid;
lookup_attr(notes[i], "refid", refid);
show_note(document, notes[i], refid, explicit_markup);
document << " </td>\n"
"</tr>\n";
}
document << "</table>\n";
}
document << "<div id=\"legend\">\n"
<< (mode == "developer"? library_developer_legend : library_user_legend) << "\n"
"</div>\n";
insert_page_links(document, encode_path(library), release, alternate_mode(mode));
document << "</body>\n";
document << "</html>\n";
}
}
}
}

View File

@@ -0,0 +1,29 @@
// result_page.hpp
//
// Copyright MetaCommunications, Inc. 2003-2007.
// Copyright Steven Watanabe 2010-2011
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_REGRESSION_RESULT_PAGE_HPP
#define BOOST_REGRESSION_RESULT_PAGE_HPP
#include <boost/filesystem/path.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <string>
#include <vector>
#include "xml.hpp"
namespace boost {
namespace regression {
void result_page(const test_structure_t& tests,
const failures_markup_t& explicit_markup,
bool release,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const std::string& mode,
const boost::filesystem::path& comment_file);
}
}
#endif

57
reports/src/runners.cpp Normal file
View File

@@ -0,0 +1,57 @@
// Copyright MetaCommunications, Inc. 2003-2004.
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "runners.hpp"
#include "html_writer.hpp"
#include "common.hpp"
#include <boost/foreach.hpp>
#include <iostream>
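// Writes runners.html, a simple table of links (one per runner), plus an
// individual page for each runner containing that runner's comment block,
// which is trusted HTML and therefore written unescaped.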
void boost::regression::runners(const test_structure_t& tests) {
{
html_writer document("runners.html");
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n"
"<html>\n"
" <head>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"master.css\" title=\"master\" />\n"
" <title>runners</title>\n"
" </head>\n"
" <body>\n";
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
document << " <table>"
"<tr>"
"<td>"
"<a href=\"" << escape_uri(encode_path(run.runner)) << ".html\">" << escape_xml(run.runner) << "</a>"
"</td>"
"</tr>"
"</table>\n";
}
}
document << " </body>\n"
"</html>\n";
}
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
std::cout << "Writing runner document " << encode_path(run.runner) << ".html" << std::endl;
html_writer document(encode_path(run.runner) + ".html");
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\n"
"<html>\n"
"<head>\n"
"<title>" << escape_xml(run.runner) << "</title>\n"
"</head>\n"
"<body>\n"
"<h1>" << escape_xml(run.runner) << "</h1>\n"
"<hr></hr>"
<< run.comment // Intentionally not escaped--contains html formatting
<< "</body>\n"
"</html>\n";
}
}
}

22
reports/src/runners.hpp Normal file
View File

@@ -0,0 +1,22 @@
// runners.hpp
//
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_REGRESSION_RUNNERS_HPP
#define BOOST_REGRESSION_RUNNERS_HPP
#include "xml.hpp"
namespace boost {
namespace regression {
void runners(const test_structure_t& tests);
}
}
#endif

View File

@@ -0,0 +1,260 @@
// Copyright MetaCommunications, Inc. 2003-2004.
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include <boost/foreach.hpp>
#include <boost/next_prior.hpp>
#include "common.hpp"
#include "summary_page.hpp"
#include "html_writer.hpp"
#include "html.hpp"
#include <iostream>
#include <set>
using namespace boost::regression;
namespace {
// report developer status
// safe
void insert_cell_developer(html_writer& document,
const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_structure_t::library_t& current_cell,
bool release) {
std::string class_ = "summary-" + result_cell_class(explicit_markup, library, toolset, current_cell);
std::string library_page = encode_path(library);
document << "<td class=\"" << class_ << "\" title=\"" << escape_xml(library) << "/" << escape_xml(toolset) << "\">\n";
if(class_ == "summary-unusable") {
document << "&#160;&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"n/a"
"</a>"
"&#160;&#160;";
} else if(class_ == "summary-missing") {
document << "&#160;&#160;&#160;&#160;";
} else if(class_ == "summary-fail-unexpected") {
document << "<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"broken"
"</a>";
} else if(class_ == "summary-fail-unexpected-new") {
document << "&#160;&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"fail"
"</a>"
"&#160;&#160;";
} else {
document << "&#160;&#160;OK&#160;&#160;";
}
document << "</td>\n";
}
// report user status
// safe
void insert_cell_user(html_writer& document,
const failures_markup_t& explicit_markup,
const std::string& library,
const std::string& toolset,
const test_structure_t::library_t& current_cell,
bool release) {
std::string class_ = "summary-" + result_cell_class(explicit_markup, library, toolset, current_cell);
std::string library_page = encode_path(library);
document << "<td class=\"" << class_ << " user-" << class_ << "\" title=\"" << escape_xml(library) << "/" << escape_xml(toolset) << "\">\n";
if(class_ == "summary-unusable") {
document << "&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"unusable"
"</a>"
"&#160;";
} else if(class_ == "summary-missing") {
document << "&#160;no&#160;results&#160;";
} else if(class_ == "summary-fail-unexpected") {
document << "&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"regress."
"</a>"
"&#160;";
} else if(class_ == "summary-fail-unexpected-new" ||
class_ == "summary-fail-expected" ||
class_ == "summary-unknown-status" ||
class_ == "summary-fail-unexpected-unresearched") {
document << "&#160;"
"<a href=\"" << escape_uri(library_page) << release_postfix(release) << ".html\" class=\"log-link\" target=\"_top\">"
"details"
"</a>"
"&#160;";
} else {
document << "&#160;pass&#160;";
}
document << "</td>\n";
}
}
// requires: mode = developer | user
// requires: source is a Git branch name
void boost::regression::summary_page(const std::string& mode,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const test_structure_t& tests,
const failures_markup_t & explicit_markup,
bool release) {
std::set<std::string> sorted_libraries;
get_libraries(tests, sorted_libraries);
std::string summary_results("summary" + release_postfix(release) + "_.html");
std::cout << "Writing document " << "summary" << release_postfix(release) << ".html" << std::endl;
{
html_writer document(mode + "/" + "summary" + release_postfix(release) + ".html");
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD html 4.01 Frameset//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd\">\n"
"<html>\n"
" <head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\"/>\n"
" <title>Boost regression summary: " << source << "</title>\n"
" </head>\n"
" <frameset cols=\"190px,*\" frameborder=\"0\" framespacing=\"0\" border=\"0\">\n"
" <frame name=\"tocframe\" src=\"toc" << release_postfix(release) << ".html\" scrolling=\"auto\"/>\n"
" <frame name=\"docframe\" src=\"" << summary_results << "\" scrolling=\"auto\"/>\n"
" </frameset>\n"
"</html>\n";
}
// Summary results
std::cout << "Writing document " << summary_results << std::endl;
{
html_writer document(mode + "/" + summary_results);
document << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n"
"<html>\n"
"<head>\n"
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"/>\n"
" <link rel=\"stylesheet\" type=\"text/css\" href=\"../master.css\" title=\"master\"/>\n"
" <title>Boost regression summary: " << source << "</title>\n"
"</head>\n"
"<body>\n";
insert_page_links(document, "summary", release, alternate_mode(mode));
document << "<h1 class=\"page-title\">\n"
" Summary: \n"
" <a class=\"hover-link\" href=\"summary" << release_postfix(release) << ".html\" target=\"_top\">" << source << "</a>\n"
"</h1>\n";
insert_report_header(document, run_date, warnings);
std::size_t num_unusable = 0;
std::size_t num_regressions = 0;
std::size_t num_new_failures = 0;
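    // Walk every test log once: count logs under unusable library/toolset
    // markup, and split the unexpected failures (failed result, not expected)
    // into new failures and regressions using the is-new flag.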
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
BOOST_FOREACH(test_structure_t::toolset_t::const_reference library, toolset.second) {
bool unusable = is_unusable(explicit_markup, library.first, toolset.first);
BOOST_FOREACH(test_structure_t::library_t::const_reference test_case, library.second) {
BOOST_FOREACH(test_structure_t::test_case_t::const_reference test_log, test_case.second) {
if(unusable) ++num_unusable;
else if(!test_log.result && !test_log.status) {
if(test_log.is_new) ++num_new_failures;
else ++num_regressions;
}
}
}
}
}
}
}
document << "<div class=\"statistics\">\n"
"Unusable: " << num_unusable << "\n"
"&#160;|&#160;\n"
"Regressions: " << num_regressions << "\n"
"&#160;|&#160;\n"
"New failures: " << num_new_failures << "\n"
"</div>\n";
// summary table
document << "<table border=\"0\" cellspacing=\"0\" cellpadding=\"0\" width=\"1%\" class=\"summary-table\" summary=\"Overall summary\">\n";
document << "<thead>\n";
insert_runners_rows(document, "summary", "top", tests, run_date);
insert_toolsets_row(document, tests, explicit_markup, "summary", run_date);
document << "</thead>\n";
document << "<tfoot>\n";
insert_toolsets_row(document, tests, explicit_markup, "summary", run_date);
insert_runners_rows(document, "summary", "bottom", tests, run_date);
document << "</tfoot>\n";
document << "<tbody>\n";
BOOST_FOREACH(const std::string& library, sorted_libraries) {
std::string library_page = encode_path(library);
std::string library_header =
"<td class=\"library-name\">\n"
" <a href=\"" + escape_uri(library_page) + release_postfix(release) + ".html\" class=\"library-link\" target=\"_top\">\n"
" " + escape_xml(library) + "\n"
" </a>\n"
"</td>\n";
std::string line_mod;
if(sorted_libraries.size() == 1) line_mod = "-single";
else if(library == *sorted_libraries.begin()) line_mod = "-first";
else if(library == *boost::prior(sorted_libraries.end())) line_mod = "-last";
document << "<tr class=\"summary-row" << line_mod << "\">\n";
document << library_header;
test_structure_t::library_t empty_library;
BOOST_FOREACH(test_structure_t::platform_group_t::const_reference platform, tests.platforms) {
BOOST_FOREACH(test_structure_t::platform_t::const_reference run, platform.second) {
BOOST_FOREACH(test_structure_t::toolset_group_t::const_reference toolset, run.toolsets) {
test_structure_t::toolset_t::const_iterator pos = toolset.second.find(library);
const test_structure_t::library_t * current_cell =
(pos != toolset.second.end())?
&pos->second : &empty_library;
if(mode == "user") {
insert_cell_user(document, explicit_markup, library, toolset.first, *current_cell, release);
} else {
insert_cell_developer(document, explicit_markup, library, toolset.first, *current_cell, release);
}
}
}
}
document << library_header;
document << "</tr>\n";
}
document << "</tbody>\n";
document << "</table>\n";
document << "<div id=\"legend\">\n"
<< (mode == "developer"? summary_developer_legend : summary_user_legend) << "\n"
"</div>\n";
insert_page_links(document, "summary", release, alternate_mode(mode));
document << "</body>\n";
document << "</html>\n";
}
}

View File

@@ -0,0 +1,32 @@
// summary_page.hpp
//
// Copyright Steven Watanabe 2013
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_REGRESSION_SUMMARY_PAGE_HPP
#define BOOST_REGRESSION_SUMMARY_PAGE_HPP
#include <boost/filesystem/path.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <string>
#include <vector>
#include "xml.hpp"
namespace boost {
namespace regression {
void summary_page(const std::string& mode,
const std::string& source,
const boost::posix_time::ptime& run_date,
const std::vector<std::string>& warnings,
const test_structure_t& tests,
const failures_markup_t & explicit_markup,
bool release);
}
}
#endif

View File

@@ -0,0 +1,13 @@
from accept_args import *
from char_translation_table import *
from check_existance import *
from checked_system import *
from libxslt import *
from log import *
from makedirs import *
from rename import *
from tar import *
from zip import *
import sourceforge

View File

@@ -0,0 +1,30 @@
import getopt
import re
import sys
def accept_args( args_spec, args, options, usage ):
defaults_num = len(options)
( option_pairs, rest_args ) = getopt.getopt( args, '', args_spec )
map( lambda x: options.__setitem__( x[0], x[1] ), option_pairs )
if ( options.has_key( '--help' ) or len( options.keys() ) == defaults_num ):
usage()
sys.exit( 1 )
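    # A first positional argument of the form @filename names a config file with
    # one name=value option per line; comment ('#') and blank lines are skipped.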
if len( rest_args ) > 0 and rest_args[0][0] == '@':
f = open( rest_args[0][1:], 'r' )
config_lines = f.read().splitlines()
f.close()
for l in config_lines:
if re.search( r'^\s*#', l ): continue
if re.search( r'^\s*$', l ): continue
m = re.match( r'^(?P<name>.*?)=(?P<value>.*)', l )
if m:
options[ '--%s' % m.group( 'name' ) ] = m.group( 'value' )
else:
                raise Exception( 'Invalid format of config line "%s"' % l )
return rest_args

View File

@@ -0,0 +1,13 @@
import string
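# Builds a 256-entry table for str.translate(): printable ASCII characters are
# kept, everything else (including tab, vertical tab and form feed) becomes '?'.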
def chr_or_question_mark( c ):
if chr(c) in string.printable and c < 128 and c not in ( 0x09, 0x0b, 0x0c ):
return chr(c)
else:
return '?'
char_translation_table = string.maketrans(
''.join( map( chr, range(0, 256) ) )
, ''.join( map( chr_or_question_mark, range(0, 256) ) )
)

View File

@@ -0,0 +1,9 @@
import os
def check_existance( name ):
a = os.popen( '%s --version' % name )
output = a.read()
rc = a.close()
if rc is not None:
raise Exception( '"%s" is required' % name )

View File

@@ -0,0 +1,22 @@
import os
import string
import sys
def system( commands ):
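    # On Windows the command list is written to a tmp.cmd batch file and run as
    # one script; elsewhere the commands are chained with '&&' in a single shell.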
if sys.platform == 'win32':
f = open( 'tmp.cmd', 'w' )
f.write( string.join( commands, '\n' ) )
f.close()
rc = os.system( 'tmp.cmd' )
return rc
else:
rc = os.system( '&&'.join( commands ) )
return rc
def checked_system( commands, valid_return_codes = [ 0 ] ):
rc = system( commands )
if rc not in [ 0 ] + valid_return_codes:
raise Exception( 'Command sequence "%s" failed with return code %d' % ( commands, rc ) )
return rc

View File

@@ -0,0 +1,49 @@
# Copyright (c) MetaCommunications, Inc. 2003-2007
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import utils.makedirs
import utils.rename
import os.path
import os
import sys
def xslt_param( path, replace_spaces = 1 ):
path = path.replace( '\\', '/' )
if sys.platform == 'win32' and replace_spaces:
path = path.replace( ' ', '%20' )
return path
def libxslt( log, xml_file, xsl_file, output_file, parameters = None ):
utils.makedirs( os.path.dirname( output_file ) )
if sys.platform == 'win32':
os.chdir( os.path.dirname( xsl_file ) )
transform_command = 'xsltproc'
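    # Build the xsltproc command line piece by piece: the output file, any
    # --param name 'value' pairs, then the stylesheet and the input XML file.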
transform_command = transform_command + ' -o ' + '"%s"' % xslt_param( output_file )
if parameters is not None:
for i in parameters:
if parameters[i]:
parameters[i] = xslt_param( parameters[i] )
transform_command = transform_command + ' --param %s "\'%s\'" ' % ( i, parameters[ i ] )
transform_command = transform_command + ' "%s" ' % xslt_param( xsl_file )
transform_command = transform_command + ' "%s" ' % xslt_param( xml_file )
log( transform_command )
rc = os.system( transform_command )
if rc != 0:
raise Exception( '"%s" failed with return code %d' % ( transform_command, rc ) )
output_file = xslt_param( output_file, 0 )
xlst_output_file = xslt_param( output_file )
if output_file != xlst_output_file and os.path.exists( xlst_output_file ):
utils.rename( log, xlst_output_file, output_file )

18
reports/src/utils/log.py Normal file
View File

@@ -0,0 +1,18 @@
import inspect
import sys
def log_level():
frames = inspect.stack()
level = 0
for i in frames[ 3: ]:
if i[0].f_locals.has_key( '__log__' ):
level = level + i[0].f_locals[ '__log__' ]
return level
def stdlog( message ):
sys.stderr.write( '# ' + ' ' * log_level() + message + '\n' )
sys.stderr.flush()
log = stdlog

View File

@@ -0,0 +1,7 @@
import os.path
import os
def makedirs( path ):
if not os.path.exists( path ):
os.makedirs( path )

View File

@@ -0,0 +1,17 @@
# Copyright (c) MetaCommunications, Inc. 2003-2007
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import os.path
import os
def rename( log, src, dst ):
log( 'Renaming %s to %s' % ( src, dst ) )
if os.path.exists( dst ):
os.unlink( dst )
os.rename( src, dst )

View File

@@ -0,0 +1,13 @@
import smtplib
def send_mail( mail, subject, msg = '' ):
smtp_server = smtplib.SMTP( 'mail.%s' % mail.split( '@' )[-1] )
smtp_server.sendmail(
mail
, [ mail ]
, 'Subject: %s\n' % subject
+ 'To: %s\n' % mail
+ '\n'
+ msg
)

View File

@@ -0,0 +1,48 @@
import utils.checked_system
import os
import sys
site_dir = '/home/groups/b/bo/boost/htdocs/'
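# Helpers for mirroring report files to the SourceForge project web space:
# download/upload rsync to and from shell.sourceforge.net, and checked_system/
# untar run command sequences remotely on that host over ssh.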
def download( source, destination, user ):
if sys.platform == 'win32':
destination = os.popen( 'cygpath "%s"' % destination ).read().splitlines()[0]
utils.checked_system( [
'rsync -v -r -z --progress %(user)s@shell.sourceforge.net:%(site_dir)s%(source)s %(dest)s'
% { 'user': user, 'site_dir': site_dir, 'source': source, 'dest': destination }
] )
def upload( source, destination, user ):
if sys.platform == 'win32':
source = os.popen( 'cygpath "%s"' % source ).read().splitlines()[0]
utils.checked_system( [
'rsync -v -r -z --progress %(source)s %(user)s@shell.sourceforge.net:%(site_dir)s%(dest)s'
% { 'user': user, 'site_dir': site_dir, 'source': source, 'dest': destination }
] )
def checked_system( commands, user, background = False ):
if not background:
cmd = 'ssh -l %s shell.sourceforge.net "%s"'
else:
cmd = 'ssh -f -l %s shell.sourceforge.net "%s"'
utils.checked_system(
[ cmd % ( user, '&&'.join( commands ) ) ]
)
def untar( archive, user, background ):
checked_system(
[
'cd %s' % os.path.join( site_dir, os.path.dirname( archive ) )
, 'tar -x -z --overwrite --mode=+w -f %s' % os.path.basename( archive )
, 'rm -f %s' % archive
]
, user = user
, background = background
)

16
reports/src/utils/tar.py Normal file
View File

@@ -0,0 +1,16 @@
import utils.checked_system
import os.path
def tar( source_dir, archive_name ):
utils.checked_system( [
'cd %s' % source_dir
, 'tar -c -f ../%s -z *' % archive_name
] )
def untar( archive_path ):
#utils.checked_system( [ 'tar -xjf "%s"' % archive_path ] )
utils.checked_system( [
'cd %s' % os.path.dirname( archive_path )
, 'tar -xjf "%s"' % os.path.basename( archive_path )
] )

12
reports/src/utils/zip.py Normal file
View File

@@ -0,0 +1,12 @@
import zipfile
import os.path
def unzip( archive_path, result_dir ):
z = zipfile.ZipFile( archive_path, 'r', zipfile.ZIP_DEFLATED )
for f in z.infolist():
result = open( os.path.join( result_dir, f.filename ), 'wb' )
result.write( z.read( f.filename ) )
result.close()
z.close()

387
reports/src/xml.cpp Normal file
View File

@@ -0,0 +1,387 @@
// xml.cpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "xml.hpp"
#include "common.hpp"
#include <boost/date_time/posix_time/time_parsers.hpp>
#include <boost/functional/hash.hpp>
#include <boost/throw_exception.hpp>
#include <algorithm>
#include <fstream>
#include <boost/format.hpp>
#include <boost/foreach.hpp>
using namespace boost::regression;
std::size_t boost::regression::hash_value(const test_case_t& test_case) {
std::size_t result = 0;
boost::hash_combine(result, test_case.test_name);
boost::hash_combine(result, test_case.library);
boost::hash_combine(result, test_case.toolset_name);
return result;
}
bool boost::regression::operator==(const test_case_t& lhs, const test_case_t& rhs) {
return lhs.test_name == rhs.test_name &&
lhs.library == rhs.library &&
lhs.toolset_name == rhs.toolset_name;
}
boost::regression::attr_ptr boost::regression::lookup_attr(node_ptr element, const std::string& name) {
if(element == 0) return 0;
return element->first_attribute(name.data(), name.size());
}
bool boost::regression::lookup_attr(node_ptr element, const std::string& name, std::string& result) {
if(element == 0) return false;
if(attr_ptr attr = lookup_attr(element, name)) {
result = std::string(attr->value(), attr->value_size());
return true;
} else {
return false;
}
}
void require_attr(node_ptr element, const std::string& name, std::string& result) {
if(!lookup_attr(element, name, result)) {
throw xml_error("Missing attribute " + name + " in element " + std::string(element->name(), element->name_size()));
}
}
bool boost::regression::check_attr(node_ptr element, const std::string& name, const std::string& expected) {
if(attr_ptr attr = lookup_attr(element, name)) {
return std::string(attr->value(), attr->value_size()) == expected;
} else {
return false;
}
}
bool boost::regression::check_name(node_ptr element, const std::string& name) {
return std::string(element->name(), element->name_size()) == name;
}
bool boost::regression::check_attr(node_ptr element,
const std::string& element1,
const std::string& attr,
const std::string& expected) {
if(element == 0) return false;
else if(element1 == "*") {
FOR_EACH_ELEMENT(nested, element) {
if(check_attr(nested, attr, expected)) {
return true;
}
}
return false;
} else {
return check_attr(lookup_element(element, element1), attr, expected);
}
}
boost::regression::node_ptr boost::regression::lookup_element(node_ptr element, const std::string& name) {
if(element == 0) {
return 0;
} else {
return element->first_node(name.data(), name.size());
}
}
int boost::regression::count_element(node_ptr element, const std::string& name) {
int result = 0;
element = element->first_node(name.data(), name.size());
while(element != 0) {
++result;
element = element->next_sibling(name.data(), name.size());
}
return result;
}
std::string boost::regression::value_of(node_ptr element) {
if(element && element->value() != 0) {
return std::string(element->value(), element->value_size());
} else {
return std::string();
}
}
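// Recursively scans the explicit failures markup, indexing <library> elements
// by name, collecting toolsets marked status="required", and indexing <note>
// elements by their id for later lookup.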
void boost::regression::load_failures_markup(node_ptr root, failures_markup_t& failures_markup) {
if(check_name(root, "library")) {
std::string library;
lookup_attr(root, "name", library);
failures_markup.libraries.insert(std::make_pair(library, root));
} else if(check_name(root, "mark-toolset")) {
if(check_attr(root, "status", "required")) {
std::string name;
if(lookup_attr(root, "name", name)) {
failures_markup.required_toolsets.insert(name);
}
}
} else if(check_name(root, "note")) {
std::string refid;
if(lookup_attr(root, "id", refid)) {
failures_markup.notes.insert(std::make_pair(refid, root));
}
} else {
FOR_EACH_ELEMENT(elem, root) {
load_failures_markup(elem, failures_markup);
}
}
}
namespace {
void load_test_log(node_ptr root, test_structure_t::test_log_t& test_log) {
lookup_attr(root, "library", test_log.library);
lookup_attr(root, "test-program", test_log.test_program);
test_log.show_run_output = check_attr(root, "show-run-output", "true");
lookup_attr(root, "toolset", test_log.toolset);
lookup_attr(root, "test-type", test_log.test_type);
lookup_attr(root, "test-name", test_log.test_name);
lookup_attr(root, "target-directory", test_log.target_directory);
// these are set by add_expected_results
test_log.result = false; // check_attr(root, "result", "success");
test_log.expected_result = false; // check_attr(root, "expected-result", "success");
// lookup_attr(root, "expected-reason", test_log.expected_reason);
test_log.status = check_attr(root, "status", "expected");
test_log.is_new = check_attr(root, "is-new", "yes");
lookup_attr(root, "category", test_log.category);
// process compile/run/etc.
FOR_EACH_ELEMENT(elem, root) {
std::string name(elem->name(), elem->name_size());
if(name != "") {
test_structure_t::target_t& target = test_log.targets[name];
target.type = name;
lookup_attr(elem, "timestamp", target.timestamp);
target.result = !check_attr(elem, "result", "fail");
target.contents = elem;
}
}
}
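    // Recursively walks the parsed XML and sorts every <test-log> element into
    // toolset -> library -> test-name buckets, keeping real test cases separate
    // from other targets via is_test_log_a_test_case.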
void collect_toolsets(node_ptr root, test_structure_t::toolset_group_t& out, test_structure_t::toolset_group_t& non_test_case_targets) {
if(check_name(root, "test-log")) {
std::string toolset;
if(lookup_attr(root, "toolset", toolset)) {
std::string library, test_name;
lookup_attr(root, "library", library);
lookup_attr(root, "test-name", test_name);
test_structure_t::test_log_t log;
load_test_log(root, log);
if(is_test_log_a_test_case(log))
out[toolset][library][test_name].push_back(log);
else
non_test_case_targets[toolset][library][test_name].push_back(log);
}
} else {
FOR_EACH_ELEMENT(elem, root) {
collect_toolsets(elem, out, non_test_case_targets);
}
}
}
// FIXME: Make sure that Boost.DateTime handles parsing errors correctly
boost::posix_time::ptime parse_time(std::string arg) {
// fix up some formatting problems
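        // Runner timestamps look like "2010-05-11T18:29:17Z"; drop the trailing
        // 'Z' and turn the 'T' separator into a space so time_from_string accepts it.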
if(!arg.empty() && arg[arg.size() - 1] == 'Z') arg.resize(arg.size() - 1);
std::replace(arg.begin(), arg.end(), 'T', ' ');
return boost::posix_time::time_from_string(arg);
}
void validate_run(const test_structure_t::run_t& run) {
if(run.run_type != "incremental" && run.run_type != "full") {
BOOST_THROW_EXCEPTION(xml_error("Expected run-type to be \"incremental\" or \"full\""));
}
// For Git, revision is a SHA, and thus may contain alpha characters
// BOOST_FOREACH(char ch, run.revision) {
// if(!('0' <= ch && ch <= '9')) {
// BOOST_THROW_EXCEPTION(xml_error("Expected revision to be a numeric constant"));
// }
// }
}
}
void boost::regression::load_test_structure(node_ptr root, test_structure_t& structure, std::vector<test_structure_t::run_t*>& runs) {
if(check_name(root, "test-run")) {
test_structure_t::run_t run;
std::string timestamp;
require_attr(root, "runner", run.runner);
require_attr(root, "platform", run.platform);
require_attr(root, "run-type", run.run_type);
require_attr(root, "source", run.source);
require_attr(root, "revision", run.revision);
require_attr(root, "timestamp", timestamp);
// "2010-05-11T18:29:17Z"
run.timestamp = parse_time(timestamp);
run.comment = value_of(lookup_element(root, "comment"));
validate_run(run);
collect_toolsets(root, run.toolsets, run.non_test_case_targets);
structure.platforms[run.platform].push_back(run);
runs.push_back(&structure.platforms[run.platform].back());
} else {
FOR_EACH_ELEMENT(elem, root) {
load_test_structure(elem, structure, runs);
}
}
}
namespace {
struct escaped {
const char* input;
std::size_t size;
bool trim;
};
// okay
void write_characters(html_writer& document, const char* input, std::size_t size) {
for(std::size_t i = 0; i < size; ++i) {
if(input[i] == '<') {
document << "&lt;";
} else if(input[i] == '>') {
document << "&gt;";
} else if(input[i] == '&') {
document << "&amp;";
} else {
document << input[i];
}
}
}
// FIXME: do not break in the middle of a code point
html_writer& operator<<(html_writer& document, const escaped& text) {
std::size_t max_size = 1 << 16;
if(text.trim && (text.size > max_size)) {
write_characters(document, text.input, max_size);
document << str(boost::format("...\n\n[The content has been trimmed by the report system because it exceeds %d bytes]") % max_size);
} else {
write_characters(document, text.input, text.size);
}
return document;
}
escaped make_escaped(const char* input, std::size_t size, bool trim) {
escaped result = { input, size, trim };
return result;
}
std::string escape_characters(const char* input, std::size_t size) {
std::string result;
for(std::size_t i = 0; i < size; ++i) {
if(input[i] == '<') {
result += "&lt;";
} else if(input[i] == '>') {
result += "&gt;";
} else if(input[i] == '&') {
result += "&amp;";
} else if(input[i] == '\'') {
result += "&apos;";
} else if(input[i] == '"') {
result += "&quot;";
} else {
result += input[i];
}
}
return result;
}
}
std::string boost::regression::escape_xml(const std::string& s) {
return escape_characters(s.data(), s.size());
}
void boost::regression::write_to_stream(html_writer& os, node_ptr node, bool trim) {
using namespace boost::property_tree::detail::rapidxml;
switch(node->type()) {
case node_document:
FOR_EACH_ELEMENT(elem, node) {
write_to_stream(os, elem);
}
break;
case node_element:
os << '<' << escape_characters(node->name(), node->name_size());
for(attr_ptr attr = node->first_attribute(); attr != 0; attr = attr->next_attribute()) {
os << ' ' << std::string(attr->name(), attr->name_size()) << '=' << '"' << escape_characters(attr->value(), attr->value_size()) << '"';
}
os << '>';
FOR_EACH_ELEMENT(elem, node) {
write_to_stream(os, elem);
}
os << '<' << '/' << escape_characters(node->name(), node->name_size()) << '>';
break;
case node_data:
os << make_escaped(node->value(), node->value_size(), trim);
break;
default:
throw xml_error("Don't know how to handle element type");
}
}
void boost::regression::write_contents(html_writer& os, node_ptr node, bool trim) {
FOR_EACH_ELEMENT(elem, node) {
write_to_stream(os, elem, trim);
}
}
namespace {
struct node_storage : document_type {
std::vector<char> storage;
};
}
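// Slurps the whole file into a buffer owned by the returned document; rapidxml
// parses in place, so the resulting node tree points directly into that storage.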
boost::shared_ptr<document_type> boost::regression::read_xml_file(const char* filename) {
std::ifstream input(filename);
if(!input) {
throw(std::ios_base::failure(std::string("Could not open file: ") + filename));
}
boost::shared_ptr<node_storage> result(new node_storage());
std::streambuf* buf = input.rdbuf();
std::streambuf::int_type ch;
while((ch = buf->sbumpc()) != std::char_traits<char>::eof()) {
result->storage.push_back(ch);
}
result->storage.push_back('\0');
result->parse<boost::property_tree::detail::rapidxml::parse_default>(&result->storage[0]);
return result;
}
namespace {
void load_expected_results(node_ptr root, test_case_t id, expected_results_t& expected_results) {
if(check_name(root, "test-result")) {
lookup_attr(root, "test-name", id.test_name);
bool result = !check_attr(root, "result", "fail");
expected_results.tests.insert(std::make_pair(id, result));
} else {
if(check_name(root, "toolset")) {
std::string name;
lookup_attr(root, "name", name);
id.toolset_name = name;
FOR_EACH_ELEMENT(elem, root) {
if(check_name(elem, "toolset-alias")) {
std::string alias_name;
if(lookup_attr(elem, "name", alias_name)) {
expected_results.toolset_aliases.insert(std::make_pair(alias_name, name));
}
}
}
} else if(check_name(root, "library")) {
lookup_attr(root, "name", id.library);
}
FOR_EACH_ELEMENT(elem, root) {
load_expected_results(elem, id, expected_results);
}
}
}
}
void boost::regression::load_expected_results(node_ptr root, expected_results_t& expected_results) {
test_case_t id;
::load_expected_results(root, id, expected_results);
}

133
reports/src/xml.hpp Normal file
View File

@@ -0,0 +1,133 @@
// xml.hpp
//
// Copyright (c) 2010 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef XML_HPP_INCLUDED
#define XML_HPP_INCLUDED
#include <string>
#include <vector>
#include <map>
#include <exception>
#include <iosfwd>
#include <boost/unordered_map.hpp>
#include <boost/unordered_set.hpp>
#include <boost/property_tree/detail/rapidxml.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <boost/variant.hpp>
#include <boost/shared_ptr.hpp>
#include "html_writer.hpp"
namespace boost {
namespace regression {
class xml_error : public std::exception {
public:
explicit xml_error(const std::string& m) : message(m) {}
virtual ~xml_error() throw() {}
virtual const char * what() const throw() { return message.c_str(); }
private:
std::string message;
};
typedef boost::property_tree::detail::rapidxml::xml_node<> node_type;
typedef boost::property_tree::detail::rapidxml::xml_attribute<> attr_type;
typedef boost::property_tree::detail::rapidxml::xml_document<> document_type;
typedef node_type* node_ptr;
typedef attr_type* attr_ptr;
typedef document_type* document_ptr;
struct test_case_t {
std::string toolset_name;
std::string library;
std::string test_name;
};
std::size_t hash_value(const test_case_t& test_case);
bool operator==(const test_case_t& lhs, const test_case_t& rhs);
struct expected_results_t {
typedef boost::unordered_map<test_case_t, bool> tests_t;
typedef boost::unordered_map<std::string, std::string> toolset_aliases_t;
tests_t tests;
toolset_aliases_t toolset_aliases;
};
void load_expected_results(node_ptr root, expected_results_t& expected_results);
struct test_structure_t {
struct target_t {
std::string type;
std::string timestamp;
bool result;
node_ptr contents;
};
typedef boost::variant<std::string, node_ptr> note_t;
struct test_log_t {
std::string library;
std::string test_program;
bool show_run_output;
std::string toolset;
std::string test_type;
std::string test_name;
std::string target_directory;
bool result;
bool expected_result;
std::string expected_reason;
bool status;
bool is_new;
std::string category;
boost::unordered_map<std::string, target_t> targets;
std::vector<note_t> notes;
};
typedef std::vector<test_log_t> test_case_t;
typedef std::map<std::string, test_case_t> library_t;
typedef std::map<std::string, library_t> toolset_t;
typedef std::map<std::string, toolset_t> toolset_group_t;
struct run_t {
std::string runner;
std::string platform;
std::string run_type;
std::string source;
std::string revision;
std::string comment;
boost::posix_time::ptime timestamp;
toolset_group_t toolsets;
toolset_group_t non_test_case_targets;
};
typedef std::vector<run_t> platform_t;
typedef std::map<std::string, platform_t> platform_group_t;
platform_group_t platforms;
};
void load_test_structure(node_ptr root, test_structure_t& structure, std::vector<test_structure_t::run_t*>& runs);
struct failures_markup_t {
boost::unordered_map<std::string, node_ptr> libraries;
boost::unordered_set<std::string> required_toolsets;
boost::unordered_map<std::string, node_ptr> notes;
};
void load_failures_markup(node_ptr root, failures_markup_t& failures_markup);
#define FOR_EACH_ELEMENT(name, node)\
for(::boost::regression::node_ptr name = (node)->first_node(); name != 0; name = name->next_sibling())
attr_ptr lookup_attr(node_ptr element, const std::string& name);
bool lookup_attr(node_ptr element, const std::string& name, std::string& result);
bool check_attr(node_ptr element, const std::string& name, const std::string& expected);
bool check_name(node_ptr element, const std::string& name);
bool check_attr(node_ptr element, const std::string& element1, const std::string& attr, const std::string& expected);
node_ptr lookup_element(node_ptr element, const std::string& name);
int count_element(node_ptr element, const std::string& name);
std::string value_of(node_ptr element);
std::string escape_xml(const std::string& s);
void write_to_stream(html_writer& os, node_ptr node, bool trim=false);
void write_contents(html_writer& document, node_ptr node, bool trim=false);
boost::shared_ptr<document_type> read_xml_file(const char* filename);
}
}
#endif

768
reports/src/zip.hpp Normal file
View File

@@ -0,0 +1,768 @@
// zip.hpp
//
// Copyright (c) 2010, 2013
// Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_ZIP_ZIP_HPP_INCLUDED
#define BOOST_ZIP_ZIP_HPP_INCLUDED
#include <cassert>
#include <cstring>
#include <ostream>
#include <string>
#include <cstddef>
#include <vector>
#include <bitset>
#include <ios>
#include <boost/array.hpp>
#include <boost/date_time/posix_time/ptime.hpp>
#include <boost/date_time/posix_time/posix_time_types.hpp>
#include <boost/cstdint.hpp>
#include <boost/crc.hpp>
#include <boost/noncopyable.hpp>
#include <boost/iostreams/categories.hpp>
#include <boost/iostreams/operations.hpp>
#include <boost/iostreams/filter/zlib.hpp>
#include <boost/mpl/integral_c.hpp>
namespace boost {
namespace zip {
// TODO: Handle endian conversions
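// BOOST_ZIP_DEFINE_HEADER generates get_/set_ accessors that memcpy a
// fixed-width integer field into or out of a raw header buffer at the given
// byte offset; the raw copy assumes the host byte order already matches the
// on-disk layout (hence the endianness TODO above).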
#define BOOST_ZIP_DEFINE_HEADER(name, type, offset) \
static const int name##_offset = (offset); \
static type get_##name(const char* header) { \
type result; \
::std::memcpy(&result, header + (offset), sizeof(type)); \
return result; \
} \
static void set_##name(char* header, type x) { \
::std::memcpy(header + (offset), &x, sizeof(type)); \
}
class zip_archive {
public:
zip_archive(std::ostream& file) : output_file(file), current_offset(0), num_files(0) {}
~zip_archive() {
close();
}
class file_handle;
friend class file_handle;
class file_handle {
public:
typedef char char_type;
struct category :
::boost::iostreams::sink_tag,
::boost::iostreams::closable_tag
{};
file_handle(zip_archive& archive,
const std::string& path,
boost::uint16_t creator_version,
boost::uint16_t minimum_required_version,
boost::uint16_t flags,
boost::uint16_t compression_method,
const boost::posix_time::ptime& modification_time)
{
self = 0;
archive.open_file(path, creator_version, minimum_required_version, flags, compression_method, modification_time, this);
}
file_handle(zip_archive& archive,
const std::string& path,
boost::uint16_t creator_version,
boost::uint16_t minimum_required_version,
boost::uint16_t flags,
boost::uint16_t compression_method)
{
self = 0;
archive.open_file(path.data(), path.size(), creator_version, minimum_required_version, flags, compression_method, 0, 0, this);
}
::std::streamsize write(const char* data, ::std::streamsize size) {
assert(self != 0);
self->output_file.write(data, size);
compressed_size += size;
self->current_offset += size;
return size;
}
void write_uncompressed(const char* data, ::std::streamsize size) {
assert(self != 0);
crc.process_bytes(data, static_cast<std::size_t>(size));
uncompressed_size += size;
}
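// close() finishes a streamed member: it records the final CRC and sizes in
// the in-memory central directory entry, then seeks back to the local file
// header (handle->pos, saved by open_file) and patches the crc32,
// compressed-size and uncompressed-size fields that could not be known
// before the data was written, restoring the stream position afterwards.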
void close() {
central_directory_entry::set_crc32(&self->central_directory[offset], crc.checksum());
// These lines cause a warning. Since the warning is legitimate,
// I'm leaving it.
central_directory_entry::set_compressed_size(&self->central_directory[offset], compressed_size);
central_directory_entry::set_uncompressed_size(&self->central_directory[offset], uncompressed_size);
boost::array<char, 12> buffer;
data_descriptor::set_crc32(&buffer[0], crc.checksum());
data_descriptor::set_compressed_size(&buffer[0], compressed_size);
data_descriptor::set_uncompressed_size(&buffer[0], uncompressed_size);
std::streamsize current_pos = self->output_file.tellp();
self->output_file.seekp(pos);
self->output_file.write(&buffer[0], 12);
self->output_file.seekp(current_pos);
self = 0;
}
private:
friend class zip_archive;
file_handle(const file_handle&);
file_handle& operator=(const file_handle&);
boost::crc_32_type crc;
std::streamsize pos;
std::size_t offset;
std::streamsize compressed_size;
std::streamsize uncompressed_size;
zip_archive* self;
};
void open_file(const std::string& path,
boost::uint16_t creator_version,
boost::uint16_t minimum_required_version,
boost::uint16_t flags,
boost::uint16_t compression_method,
const boost::posix_time::ptime& modification_time,
file_handle* out
)
{
boost::uint16_t date =
modification_time.date().day() +
(modification_time.date().month() << 5) +
((modification_time.date().year() - 1980) << 9);
boost::uint16_t time =
(modification_time.time_of_day().seconds() / 2) +
(modification_time.time_of_day().minutes() << 5) +
(modification_time.time_of_day().hours() << 11);
open_file(path.data(), path.size(), creator_version, minimum_required_version, flags, compression_method, time, date, out);
}
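// Worked example of the MS-DOS packing above (illustrative): for
// 2015-01-21 12:56:41 the date word is 21 + (1 << 5) + ((2015 - 1980) << 9)
// = 17973 (0x4635) and the time word is 41 / 2 + (56 << 5) + (12 << 11)
// = 26388 (0x6714); note that seconds are stored at two-second resolution.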
void open_file(const char* path, std::size_t path_size,
boost::uint16_t creator_version,
boost::uint16_t minimum_required_version,
boost::uint16_t flags,
boost::uint16_t compression_method,
boost::uint16_t modification_time,
boost::uint16_t modification_date,
file_handle* handle
)
{
// The file_handle should not be open
assert(handle->self == 0);
handle->pos = static_cast<std::streamsize>(output_file.tellp()) + local_file_header::crc32_offset;
std::vector<char> header(30);
local_file_header::set_signature(&header[0], local_file_header::signature);
local_file_header::set_minimum_required_version(&header[0], minimum_required_version);
local_file_header::set_flags(&header[0], flags);
local_file_header::set_compression_method(&header[0], compression_method);
local_file_header::set_filename_size(&header[0], path_size);
// TODO: handle Zip64
header.insert(header.end(), path, path + path_size);
output_file.write(&header[0], header.size());
std::size_t offset = central_directory.size();
central_directory.resize(offset + 46);
central_directory_entry::set_signature(&central_directory[offset], central_directory_entry::signature);
central_directory_entry::set_creator_version(&central_directory[offset], creator_version);
central_directory_entry::set_minimum_required_version(&central_directory[offset], minimum_required_version);
central_directory_entry::set_flags(&central_directory[offset], flags);
central_directory_entry::set_compression_method(&central_directory[offset], compression_method);
central_directory_entry::set_modification_time(&central_directory[offset], modification_time);
central_directory_entry::set_modification_date(&central_directory[offset], modification_date);
central_directory_entry::set_filename_size(&central_directory[offset], path_size);
central_directory_entry::set_extra_size(&central_directory[offset], 0);
central_directory_entry::set_comment_size(&central_directory[offset], 0);
central_directory_entry::set_file_start_disk(&central_directory[offset], 0);
central_directory_entry::set_internal_attributes(&central_directory[offset], 0);
central_directory_entry::set_external_attributes(&central_directory[offset], 0);
central_directory_entry::set_local_header_offset(&central_directory[offset], current_offset);
central_directory.insert(central_directory.end(), path, path + path_size);
handle->crc.reset();
handle->offset = offset;
handle->compressed_size = 0;
handle->uncompressed_size = 0;
handle->self = this;
current_offset += header.size();
++num_files;
}
void write_file(const std::string& path, const char* contents, std::size_t size) {
std::vector<char> header(30);
local_file_header::set_signature(&header[0], local_file_header::signature);
local_file_header::set_minimum_required_version(&header[0], 10);
local_file_header::set_flags(&header[0], 0);
local_file_header::set_compression_method(&header[0], compression_method::none);
crc_32_type crc;
crc.process_bytes(contents, size);
local_file_header::set_crc32(&header[0], crc.checksum());
local_file_header::set_compressed_size(&header[0], size);
local_file_header::set_uncompressed_size(&header[0], size);
local_file_header::set_filename_size(&header[0], path.size());
// TODO: handle Zip64
header.insert(header.end(), path.begin(), path.end());
output_file.write(&header[0], header.size());
output_file.write(contents, size);
std::size_t offset = central_directory.size();
central_directory.resize(offset + 46);
central_directory_entry::set_signature(&central_directory[offset], central_directory_entry::signature);
central_directory_entry::set_creator_version(&central_directory[offset], 10);
central_directory_entry::set_minimum_required_version(&central_directory[offset], 10);
central_directory_entry::set_flags(&central_directory[offset], 0);
central_directory_entry::set_compression_method(&central_directory[offset], compression_method::none);
// FIXME: find correct date and time
central_directory_entry::set_modification_time(&central_directory[offset], 0);
central_directory_entry::set_modification_date(&central_directory[offset], 0);
central_directory_entry::set_crc32(&central_directory[offset], crc.checksum());
central_directory_entry::set_compressed_size(&central_directory[offset], size);
central_directory_entry::set_uncompressed_size(&central_directory[offset], size);
central_directory_entry::set_filename_size(&central_directory[offset], path.size());
central_directory_entry::set_extra_size(&central_directory[offset], 0);
central_directory_entry::set_comment_size(&central_directory[offset], 0);
central_directory_entry::set_file_start_disk(&central_directory[offset], 0);
central_directory_entry::set_internal_attributes(&central_directory[offset], 0);
central_directory_entry::set_external_attributes(&central_directory[offset], 0);
central_directory_entry::set_local_header_offset(&central_directory[offset], current_offset);
central_directory.insert(central_directory.end(), path.begin(), path.end());
current_offset = current_offset + header.size() + size;
++num_files;
}
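// close() appends the accumulated central directory followed by the
// end-of-central-directory record. With 65536 or more entries the 16-bit
// counts no longer fit, so the Zip64 records are written first and the
// classic record carries 0xFFFF sentinel counts that direct readers to the
// Zip64 data.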
void close() {
output_file.write(&central_directory[0], central_directory.size());
if(num_files >= 65536) {
boost::array<char, zip64_end_of_central_directory::size> data;
zip64_end_of_central_directory::set_signature(&data[0], zip64_end_of_central_directory::signature);
zip64_end_of_central_directory::set_size(&data[0], zip64_end_of_central_directory::size - 12);
zip64_end_of_central_directory::set_creator_version(&data[0], 45);
zip64_end_of_central_directory::set_minimum_required_version(&data[0], 45);
zip64_end_of_central_directory::set_disk_number(&data[0], 0);
zip64_end_of_central_directory::set_directory_start_disk(&data[0], 0);
zip64_end_of_central_directory::set_entries_on_disk(&data[0], num_files);
zip64_end_of_central_directory::set_total_entries(&data[0], num_files);
zip64_end_of_central_directory::set_directory_size(&data[0], central_directory.size());
zip64_end_of_central_directory::set_directory_offset(&data[0], current_offset);
output_file.write(&data[0], data.size());
boost::array<char, zip64_end_of_central_directory_locator::size> locator;
zip64_end_of_central_directory_locator::set_signature(&locator[0], zip64_end_of_central_directory_locator::signature);
zip64_end_of_central_directory_locator::set_end_of_directory_disk(&locator[0], 0);
zip64_end_of_central_directory_locator::set_end_of_directory_offset(&locator[0], current_offset + central_directory.size());
zip64_end_of_central_directory_locator::set_total_disks(&locator[0], 1);
output_file.write(&locator[0], locator.size());
std::vector<char> end(22);
end_of_central_directory::set_signature(&end[0], end_of_central_directory::signature);
end_of_central_directory::set_disk_number(&end[0], 0);
end_of_central_directory::set_directory_start_disk(&end[0], 0);
end_of_central_directory::set_entries_on_disk(&end[0], 0xFFFFu);
end_of_central_directory::set_total_entries(&end[0], 0xFFFFu);
end_of_central_directory::set_directory_size(&end[0], central_directory.size());
end_of_central_directory::set_directory_offset(&end[0], current_offset);
end_of_central_directory::set_comment_length(&end[0], 0);
output_file.write(&end[0], end.size());
} else {
std::vector<char> end(22);
end_of_central_directory::set_signature(&end[0], end_of_central_directory::signature);
end_of_central_directory::set_disk_number(&end[0], 0);
end_of_central_directory::set_directory_start_disk(&end[0], 0);
end_of_central_directory::set_entries_on_disk(&end[0], num_files);
end_of_central_directory::set_total_entries(&end[0], num_files);
end_of_central_directory::set_directory_size(&end[0], central_directory.size());
end_of_central_directory::set_directory_offset(&end[0], current_offset);
end_of_central_directory::set_comment_length(&end[0], 0);
output_file.write(&end[0], end.size());
}
}
private:
std::ostream& output_file;
std::vector<char> central_directory;
std::streamsize current_offset;
std::size_t num_files;
// little endian
struct local_file_header {
static const boost::uint32_t signature = 0x04034b50u;
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(minimum_required_version, boost::uint16_t, 4);
BOOST_ZIP_DEFINE_HEADER(flags, boost::uint16_t, 6);
BOOST_ZIP_DEFINE_HEADER(compression_method, boost::uint16_t, 8);
BOOST_ZIP_DEFINE_HEADER(modification_time, boost::uint16_t, 10);
BOOST_ZIP_DEFINE_HEADER(modification_date, boost::uint16_t, 12);
BOOST_ZIP_DEFINE_HEADER(crc32, boost::uint32_t, 14);
BOOST_ZIP_DEFINE_HEADER(compressed_size, boost::uint32_t, 18);
BOOST_ZIP_DEFINE_HEADER(uncompressed_size, boost::uint32_t, 22);
BOOST_ZIP_DEFINE_HEADER(filename_size, boost::uint16_t, 26);
BOOST_ZIP_DEFINE_HEADER(extra_size, boost::uint16_t, 28);
static char* filename(void* header) {
return static_cast<char*>(header) + 30;
}
static const char* filename(const void* header) {
return static_cast<const char*>(header) + 30;
}
};
struct data_descriptor {
// The signature may or may not be present
static const boost::uint32_t signature = 0x08074b50u;
BOOST_ZIP_DEFINE_HEADER(crc32, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(compressed_size, boost::uint32_t, 4);
BOOST_ZIP_DEFINE_HEADER(uncompressed_size, boost::uint32_t, 8);
// FIXME: handle skipping the signature automatically
};
// Not implemented Archive decryption header
// Not implemented Archive extra data record
struct central_directory_entry {
static const boost::uint32_t signature = 0x02014b50u;
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(creator_version, boost::uint16_t, 4);
BOOST_ZIP_DEFINE_HEADER(minimum_required_version, boost::uint16_t, 6);
BOOST_ZIP_DEFINE_HEADER(flags, boost::uint16_t, 8);
BOOST_ZIP_DEFINE_HEADER(compression_method, boost::uint16_t, 10);
BOOST_ZIP_DEFINE_HEADER(modification_time, boost::uint16_t, 12);
BOOST_ZIP_DEFINE_HEADER(modification_date, boost::uint16_t, 14);
BOOST_ZIP_DEFINE_HEADER(crc32, boost::uint32_t, 16);
BOOST_ZIP_DEFINE_HEADER(compressed_size, boost::uint32_t, 20);
BOOST_ZIP_DEFINE_HEADER(uncompressed_size, boost::uint32_t, 24);
BOOST_ZIP_DEFINE_HEADER(filename_size, boost::uint16_t, 28);
BOOST_ZIP_DEFINE_HEADER(extra_size, boost::uint16_t, 30);
BOOST_ZIP_DEFINE_HEADER(comment_size, boost::uint16_t, 32);
BOOST_ZIP_DEFINE_HEADER(file_start_disk, boost::uint16_t, 34);
BOOST_ZIP_DEFINE_HEADER(internal_attributes, boost::uint16_t, 36);
BOOST_ZIP_DEFINE_HEADER(external_attributes, boost::uint32_t, 38);
BOOST_ZIP_DEFINE_HEADER(local_header_offset, boost::uint32_t, 42);
// TODO: filename, extra, comment
};
struct digital_signature {
static const boost::uint32_t signature = 0x05054b50;
BOOST_ZIP_DEFINE_HEADER(data_size, boost::uint16_t, 4);
// TODO: data
};
struct zip64_end_of_central_directory {
static const boost::uint32_t signature = 0x06064b50u;
// The value stored into the "size of zip64 end of central
// directory record" should be the size of the remaining
// record and should not include the leading 12 bytes.
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(size, boost::uint64_t, 4);
BOOST_ZIP_DEFINE_HEADER(creator_version, boost::uint16_t, 12);
BOOST_ZIP_DEFINE_HEADER(minimum_required_version, boost::uint16_t, 14);
BOOST_ZIP_DEFINE_HEADER(disk_number, boost::uint32_t, 16);
BOOST_ZIP_DEFINE_HEADER(directory_start_disk, boost::uint32_t, 20);
BOOST_ZIP_DEFINE_HEADER(entries_on_disk, boost::uint64_t, 24);
BOOST_ZIP_DEFINE_HEADER(total_entries, boost::uint64_t, 32);
BOOST_ZIP_DEFINE_HEADER(directory_size, boost::uint64_t, 40);
BOOST_ZIP_DEFINE_HEADER(directory_offset, boost::uint64_t, 48);
static const size_t size = 56;
// TODO: data
// Header ID - 2 bytes
// Data Size - 4 bytes
};
struct zip64_end_of_central_directory_locator {
static const boost::uint32_t signature = 0x07064b50;
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(end_of_directory_disk, boost::uint32_t, 4);
BOOST_ZIP_DEFINE_HEADER(end_of_directory_offset, boost::uint64_t, 8);
BOOST_ZIP_DEFINE_HEADER(total_disks, boost::uint32_t, 16);
static const size_t size = 20;
};
struct end_of_central_directory {
static const boost::uint32_t signature = 0x06054b50u;
BOOST_ZIP_DEFINE_HEADER(signature, boost::uint32_t, 0);
BOOST_ZIP_DEFINE_HEADER(disk_number, boost::uint16_t, 4);
BOOST_ZIP_DEFINE_HEADER(directory_start_disk, boost::uint16_t, 6);
BOOST_ZIP_DEFINE_HEADER(entries_on_disk, boost::uint16_t, 8);
BOOST_ZIP_DEFINE_HEADER(total_entries, boost::uint16_t, 10);
BOOST_ZIP_DEFINE_HEADER(directory_size, boost::uint32_t, 12);
BOOST_ZIP_DEFINE_HEADER(directory_offset, boost::uint32_t, 16);
BOOST_ZIP_DEFINE_HEADER(comment_length, boost::uint16_t, 20);
};
public:
struct version {
static const boost::uint16_t system_mask = 0xFF00u;
static const boost::uint16_t ms_dos = 0u << 8;
static const boost::uint16_t amiga = 1u << 8;
static const boost::uint16_t open_vms = 2u << 8;
static const boost::uint16_t unix_ = 3u << 8;
static const boost::uint16_t vm_cms = 4u << 8;
static const boost::uint16_t atari_st = 5u << 8;
static const boost::uint16_t os_2_hpfs = 6u << 8;
static const boost::uint16_t macintosh = 7u << 8;
static const boost::uint16_t z_system = 8u << 8;
static const boost::uint16_t cp_m = 9u << 8;
static const boost::uint16_t windows_ntfs = 10u << 8;
static const boost::uint16_t mvs = 11u << 8;
static const boost::uint16_t vse = 12u << 8;
static const boost::uint16_t acorn_risc = 13u << 8;
static const boost::uint16_t vfat = 14u << 8;
static const boost::uint16_t alternate_mvs = 15u << 8;
static const boost::uint16_t beos = 16u << 8;
static const boost::uint16_t tandem = 17u << 8;
static const boost::uint16_t os_400 = 18u << 8;
static const boost::uint16_t darwin = 19u << 8;
// e.g. 62 = ZIP 6.2
static const boost::uint16_t zip_version_mask = 0xFFu;
static const boost::uint16_t default_ = 10;
static const boost::uint16_t file_is_volume_label = 11;
static const boost::uint16_t file_is_folder = 20;
static const boost::uint16_t file_is_compressed_with_deflate = 20;
static const boost::uint16_t zip64 = 45;
// TODO: ...
};
struct flags {
static const boost::uint16_t encrypted = 0x1u;
static const boost::uint16_t imploding_8k_dictionary = 0x2u;
static const boost::uint16_t imploding_3_shannon_faro = 0x4u;
static const boost::uint16_t deflating_options_mask = 0x6u;
static const boost::uint16_t deflating_normal = 0x0u;
static const boost::uint16_t deflating_maximum = 0x2u;
static const boost::uint16_t deflating_fast = 0x4u;
static const boost::uint16_t deflating_super_fast = 0x6u;
static const boost::uint16_t lzma_eos = 0x2u;
static const boost::uint16_t has_data_descriptor = 0x8u;
static const boost::uint16_t enhanced_deflating = 0x10;
static const boost::uint16_t strong_encryption = 0x20;
static const boost::uint16_t utf8 = 0x800;
static const boost::uint16_t mask_local_header_data = 0x2000;
};
struct compression_method {
static const boost::uint16_t none = 0;
static const boost::uint16_t shrink = 1;
static const boost::uint16_t reduce_1 = 2;
static const boost::uint16_t reduce_2 = 3;
static const boost::uint16_t reduce_3 = 4;
static const boost::uint16_t reduce_4 = 5;
static const boost::uint16_t implode = 6;
static const boost::uint16_t tokenizing = 7;
static const boost::uint16_t deflate = 8;
static const boost::uint16_t deflate64 = 9;
static const boost::uint16_t pkware_dcli = 10;
static const boost::uint16_t bzip2 = 12;
static const boost::uint16_t lzma = 14;
static const boost::uint16_t ibm_terse = 18;
static const boost::uint16_t lz77 = 19;
static const boost::uint16_t wavpack = 97;
static const boost::uint16_t ppmd_i_1 = 98;
};
struct internal_attributes {
static const boost::uint16_t ascii = 0x1;
};
struct header_id {
static const boost::uint16_t zip64 = 0x0001;
static const boost::uint16_t av_info = 0x0007;
//static const boost::uint16_t extended_language_encoding = 0x0008;
static const boost::uint16_t os_2 = 0x0009;
static const boost::uint16_t ntfs = 0x000a;
static const boost::uint16_t open_vms = 0x000c;
static const boost::uint16_t unix_ = 0x000d;
//static const boost::uint16_t file_stream = 0x000e;
static const boost::uint16_t patch_descriptor = 0x000f;
static const boost::uint16_t x509_certificate = 0x0014;
static const boost::uint16_t x509_certificate_id_file = 0x0015;
static const boost::uint16_t x509_certificate_id_directory = 0x0016;
static const boost::uint16_t strong_encryption_header = 0x0017;
static const boost::uint16_t record_management_controls = 0x0018;
static const boost::uint16_t encyption_recipients = 0x0019;
static const boost::uint16_t ibm_uncompressed = 0x0065;
static const boost::uint16_t ibm_compressed = 0x0066;
static const boost::uint16_t poszip4690 = 0x4690;
// TODO: Third party mappings
};
private:
struct zip64_extended_information {
BOOST_ZIP_DEFINE_HEADER(tag, boost::uint16_t, 0);
BOOST_ZIP_DEFINE_HEADER(size, boost::uint16_t, 2);
BOOST_ZIP_DEFINE_HEADER(uncompressed_size, boost::uint64_t, 4);
BOOST_ZIP_DEFINE_HEADER(compressed_size, boost::uint64_t, 12);
BOOST_ZIP_DEFINE_HEADER(local_header_offset, boost::uint64_t, 20);
BOOST_ZIP_DEFINE_HEADER(disk_start_number, boost::uint32_t, 28);
};
};
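// Minimal usage sketch for the archive writer above (an assumption about
// intended use, not taken from the original sources):
//
// {
//     std::ofstream raw("report.zip", std::ios::binary);
//     boost::zip::zip_archive archive(raw);
//     std::string text = "hello\n";
//     archive.write_file("readme.txt", text.data(), text.size()); // stored entry
// } // ~zip_archive() calls close(), which appends the central directory
//
// Note that the destructor also calls close() and close() is not idempotent,
// so an explicit close() followed by destruction would append the directory twice.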
class shrink_filter : ::boost::noncopyable {
public:
typedef char char_type;
struct category :
::boost::iostreams::output_filter_tag,
::boost::iostreams::closable_tag
{};
shrink_filter()
{
memory = new lzw_node[1 << 13];
// no-throw from here on
code_size = 9;
for(int i = 0; i < (1 << code_size); ++i) {
initialize_node(i);
}
used_codes.set(256);
current_node = &root;
buf = 0;
pos = 0;
for(int i = 0; i < 256; ++i) {
root.children[i] = make_node(i);
}
next_code = 257;
}
~shrink_filter() {
delete[] memory;
}
template<class Sink>
bool put(Sink& sink, char ch) {
write_char(static_cast<unsigned char>(ch));
return do_write(sink);
}
template<class Sink>
void close(Sink& sink) {
if(current_node != &root) {
write_code(get_encoding(current_node));
current_node = &root;
}
do_write(sink);
if(pos != 0) {
::boost::iostreams::put(sink, buf & 0xFF);
pos = 0;
}
}
private:
template<class Sink>
bool do_write(Sink& sink) {
while(pos >= 8) {
if(!::boost::iostreams::put(sink, static_cast<char>(buf & 0xFF))) {
return false;
}
buf >>= 8;
pos -= 8;
}
return true;
}
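// In the "Shrink" (LZW) encoding emitted here, code 256 is an escape:
// 256 followed by 1 widens the code size by one bit (up to 13 bits), and
// 256 followed by 2 performs a partial clear that frees leaf codes so
// numbering can restart at 257. write_char below emits exactly those
// sequences when the code space fills up.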
void write_char(unsigned char ch) {
if(current_node->children[ch] != 0) {
current_node = current_node->children[ch];
} else {
int encoding = get_encoding(current_node);
write_code(encoding);
for(;; ++next_code) {
if(next_code == (1 << code_size)) {
if(code_size == 13) {
write_code(256);
write_code(2);
free_leaves();
next_code = 257;
} else {
write_code(256);
write_code(1);
increment_code_size();
}
}
if(!used_codes.test(next_code)) {
current_node->children[ch] = make_node(next_code);
++next_code;
break;
}
}
current_node = root.children[ch];
}
}
void write_code(int code) {
buf |= static_cast<boost::uint64_t>(code) << pos;
pos += code_size;
}
struct lzw_node {
lzw_node* children[256];
};
int get_encoding(lzw_node* node) const {
return node - memory;
}
bool free_leaves(lzw_node* node) {
bool result = true;
for(int i = 0; i < 256; ++i) {
if(node->children[i] != 0) {
result = false;
if(free_leaves(node->children[i])) {
destroy_node(node->children[i]);
node->children[i] = 0;
}
}
}
return result;
}
void increment_code_size() {
for(int i = (1 << code_size); i < (1 << (code_size + 1)); ++i) {
initialize_node(i);
}
++code_size;
}
void free_leaves() {
for(int i = 0; i < 256; ++i) {
free_leaves(root.children[i]);
}
}
void initialize_node(int encoding) {
lzw_node* result = memory + encoding;
for(int i = 0; i < 256; ++i) {
result->children[i] = 0;
}
}
lzw_node* make_node(int encoding = 0) {
assert(!used_codes.test(encoding));
lzw_node* result = memory + encoding;
assert(result >= memory);
assert(result < memory + (1 << code_size));
used_codes.set(encoding);
return result;
}
void destroy_node(lzw_node* node) {
used_codes.reset(get_encoding(node));
}
lzw_node* memory;
lzw_node root;
lzw_node* current_node;
int code_size;
int next_code;
::std::bitset<(1 << 13)> used_codes;
::boost::uint64_t buf;
int pos;
};
class deflate_filter : public ::boost::iostreams::zlib_compressor {
public:
deflate_filter() :
boost::iostreams::zlib_compressor(boost::iostreams::zlib_params(
boost::iostreams::zlib::default_compression,
boost::iostreams::zlib::deflated,
boost::iostreams::zlib::default_window_bits,
boost::iostreams::zlib::default_mem_level,
boost::iostreams::zlib::default_strategy,
true /* noheader */,
false /* crc */))
{}
};
class noop_filter
{
public:
typedef char char_type;
struct category :
::boost::iostreams::output_filter_tag,
::boost::iostreams::multichar_tag
{};
template<class Device>
std::streamsize write(Device& dev, const char * data, std::streamsize size) {
return boost::iostreams::write(dev, data, size);
}
};
template<class Filter>
struct compression_method;
template<>
struct compression_method< ::boost::zip::noop_filter> :
::boost::mpl::integral_c<
::boost::uint16_t,
::boost::zip::zip_archive::compression_method::none
>
{};
template<>
struct compression_method< ::boost::zip::shrink_filter> :
::boost::mpl::integral_c<
::boost::uint16_t,
::boost::zip::zip_archive::compression_method::shrink
>
{};
template<>
struct compression_method< ::boost::zip::deflate_filter> :
::boost::mpl::integral_c<
::boost::uint16_t,
::boost::zip::zip_archive::compression_method::deflate
>
{};
template<class Filter>
class zip_member_sink {
public:
typedef char char_type;
struct category :
::boost::iostreams::sink_tag,
::boost::iostreams::closable_tag
{};
zip_member_sink(zip_archive& archive, const std::string& path)
: file(archive, path, 10, 10, 0,
compression_method<Filter>::value) {}
~zip_member_sink() {
close();
}
::std::streamsize write(const char* data, ::std::streamsize size) {
file.write_uncompressed(data, size);
::boost::iostreams::write(filter, file, data, size);
return size;
}
void close() {
::boost::iostreams::close(filter, file, ::std::ios_base::out);
::boost::iostreams::close(file);
}
private:
zip_archive::file_handle file;
Filter filter;
};
typedef zip_member_sink<shrink_filter> shrink_sink;
typedef zip_member_sink<deflate_filter> deflate_sink;
typedef zip_member_sink<noop_filter> nocompression_sink;
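// Sketch of streaming a deflate-compressed member through the sinks above
// (an assumption about intended use, not taken from the original sources):
//
// std::ofstream raw("report.zip", std::ios::binary);
// {
//     boost::zip::zip_archive archive(raw);
//     boost::zip::deflate_sink member(archive, "index.html");
//     const char body[] = "<html></html>";
//     boost::iostreams::write(member, body, sizeof(body) - 1);
// } // member, then archive, are closed by their destructors in that order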
}
}
#endif