Diffstat (limited to 'boost/test/impl')
-rw-r--r--  boost/test/impl/compiler_log_formatter.ipp |   2
-rw-r--r--  boost/test/impl/execution_monitor.ipp      |  29
-rw-r--r--  boost/test/impl/framework.ipp              |  38
-rw-r--r--  boost/test/impl/junit_log_formatter.ipp    | 477
-rw-r--r--  boost/test/impl/plain_report_formatter.ipp |   2
-rw-r--r--  boost/test/impl/progress_monitor.ipp       |   2
-rw-r--r--  boost/test/impl/test_tools.ipp             |  14
-rw-r--r--  boost/test/impl/unit_test_log.ipp          |   9
-rw-r--r--  boost/test/impl/unit_test_main.ipp         |   8
-rw-r--r--  boost/test/impl/unit_test_monitor.ipp      |   8
-rw-r--r--  boost/test/impl/unit_test_parameters.ipp   | 238
11 files changed, 491 insertions, 336 deletions
diff --git a/boost/test/impl/compiler_log_formatter.ipp b/boost/test/impl/compiler_log_formatter.ipp
index 8c76788109..a4e045cae0 100644
--- a/boost/test/impl/compiler_log_formatter.ipp
+++ b/boost/test/impl/compiler_log_formatter.ipp
@@ -62,7 +62,7 @@ test_phase_identifier()
void
compiler_log_formatter::log_start( std::ostream& output, counter_t test_cases_amount )
{
- m_color_output = runtime_config::get<bool>( runtime_config::COLOR_OUTPUT );
+ m_color_output = runtime_config::get<bool>( runtime_config::btrt_color_output );
if( test_cases_amount > 0 )
output << "Running " << test_cases_amount << " test "
diff --git a/boost/test/impl/execution_monitor.ipp b/boost/test/impl/execution_monitor.ipp
index 3ddcf67137..0c5690ca89 100644
--- a/boost/test/impl/execution_monitor.ipp
+++ b/boost/test/impl/execution_monitor.ipp
@@ -32,7 +32,7 @@
#include <boost/cstdlib.hpp> // for exit codes
#include <boost/config.hpp> // for workarounds
#include <boost/core/ignore_unused.hpp> // for ignore_unused
-#ifndef BOOST_NO_EXCEPTION
+#ifndef BOOST_NO_EXCEPTIONS
#include <boost/exception/get_error_info.hpp> // for get_error_info
#include <boost/exception/current_exception_cast.hpp> // for current_exception_cast
#endif
@@ -196,7 +196,7 @@ namespace boost {
// ************** throw_exception ************** //
// ************************************************************************** //
-#ifdef BOOST_NO_EXCEPTION
+#ifdef BOOST_NO_EXCEPTIONS
void throw_exception( std::exception const & e ) { abort(); }
#endif
@@ -216,7 +216,7 @@ namespace detail {
# define BOOST_TEST_VSNPRINTF( a1, a2, a3, a4 ) vsnprintf( (a1), (a2), (a3), (a4) )
#endif
-#ifndef BOOST_NO_EXCEPTION
+#ifndef BOOST_NO_EXCEPTIONS
template <typename ErrorInfo>
typename ErrorInfo::value_type
@@ -1285,7 +1285,7 @@ execution_monitor::execute( boost::function<int ()> const& F )
catch( ... )
{ detail::report_error( execution_exception::cpp_exception_error, "unknown type" ); }
-#endif // !BOOST_NO_EXCEPTION
+#endif // !BOOST_NO_EXCEPTIONS
return 0; // never reached; supplied to quiet compiler warnings
} // execute
@@ -1354,11 +1354,7 @@ unsigned
enable( unsigned mask )
{
boost::ignore_unused(mask);
-
-#if defined(UNDER_CE)
- /* Not Implemented in Windows CE */
- return BOOST_FPE_OFF;
-#elif defined(BOOST_SEH_BASED_SIGNAL_HANDLING)
+#if defined(BOOST_TEST_FPE_SUPPORT_WITH_SEH__)
_clearfp();
#if BOOST_WORKAROUND( BOOST_MSVC, <= 1310)
@@ -1373,9 +1369,10 @@ enable( unsigned mask )
if( ::_controlfp_s( 0, old_cw & ~mask, BOOST_FPE_ALL ) != 0 )
return BOOST_FPE_INV;
#endif
-
return ~old_cw & BOOST_FPE_ALL;
-#elif defined(__GLIBC__) && defined(__USE_GNU)
+
+#elif defined(BOOST_TEST_FPE_SUPPORT_WITH_GLIBC_EXTENSIONS__)
+ // same macro definition as in execution_monitor.hpp
if (BOOST_FPE_ALL == BOOST_FPE_OFF)
/* Not Implemented */
return BOOST_FPE_OFF;
@@ -1395,12 +1392,8 @@ disable( unsigned mask )
{
boost::ignore_unused(mask);
-#if defined(UNDER_CE)
- /* Not Implemented in Windows CE */
- return BOOST_FPE_INV;
-#elif defined(BOOST_SEH_BASED_SIGNAL_HANDLING)
+#if defined(BOOST_TEST_FPE_SUPPORT_WITH_SEH__)
_clearfp();
-
#if BOOST_WORKAROUND( BOOST_MSVC, <= 1310)
unsigned old_cw = ::_controlfp( 0, 0 );
::_controlfp( old_cw | mask, BOOST_FPE_ALL );
@@ -1413,9 +1406,9 @@ disable( unsigned mask )
if( ::_controlfp_s( 0, old_cw | mask, BOOST_FPE_ALL ) != 0 )
return BOOST_FPE_INV;
#endif
-
return ~old_cw & BOOST_FPE_ALL;
-#elif defined(__GLIBC__) && defined(__USE_GNU)
+
+#elif defined(BOOST_TEST_FPE_SUPPORT_WITH_GLIBC_EXTENSIONS__)
if (BOOST_FPE_ALL == BOOST_FPE_OFF)
/* Not Implemented */
return BOOST_FPE_INV;
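
The two hunks above fold the old UNDER_CE branches into the new BOOST_TEST_FPE_SUPPORT_WITH_SEH__ and BOOST_TEST_FPE_SUPPORT_WITH_GLIBC_EXTENSIONS__ guards. A hedged sketch of how the enable()/disable() pair shown here is used from caller code, assuming the boost::fpe namespace and the BOOST_FPE_* masks declared in boost/test/execution_monitor.hpp:

    #include <boost/test/execution_monitor.hpp>

    void run_with_fp_traps()
    {
        // enable() returns the traps that were previously enabled,
        // BOOST_FPE_OFF on unsupported platforms, or BOOST_FPE_INV on failure
        unsigned previous = boost::fpe::enable( BOOST_FPE_ALL );

        // ... exercise code that must not silently raise FP exceptions ...

        if( previous != BOOST_FPE_INV )
            boost::fpe::disable( BOOST_FPE_ALL );   // drop the traps again
    }
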
diff --git a/boost/test/impl/framework.ipp b/boost/test/impl/framework.ipp
index 298dde92ea..a35e8a54ac 100644
--- a/boost/test/impl/framework.ipp
+++ b/boost/test/impl/framework.ipp
@@ -396,7 +396,7 @@ parse_filters( test_unit_id master_tu_id, test_unit_id_list& tu_to_enable, test_
// 10. collect tu to enable and disable based on filters
bool had_selector_filter = false;
- std::vector<std::string> const& filters = runtime_config::get<std::vector<std::string> >( runtime_config::RUN_FILTERS );
+ std::vector<std::string> const& filters = runtime_config::get<std::vector<std::string> >( runtime_config::btrt_run_filters );
BOOST_TEST_FOREACH( const_string, filter, filters ) {
BOOST_TEST_SETUP_ASSERT( !filter.is_empty(), "Invalid filter specification" );
@@ -552,7 +552,7 @@ public:
test_unit_id_list tu_to_disable;
// 10. If there are any filters supplied, figure out lists of test units to enable/disable
- bool had_selector_filter = !runtime_config::get<std::vector<std::string> >( runtime_config::RUN_FILTERS ).empty() &&
+ bool had_selector_filter = !runtime_config::get<std::vector<std::string> >( runtime_config::btrt_run_filters ).empty() &&
parse_filters( master_tu_id, tu_to_enable, tu_to_disable );
// 20. Set the stage: either use default run status or disable all test units
@@ -657,7 +657,7 @@ public:
if( tu.p_type == TUT_SUITE ) {
test_suite const& ts = static_cast<test_suite const&>( tu );
- if( runtime_config::get<unsigned>( runtime_config::RANDOM_SEED ) == 0 ) {
+ if( runtime_config::get<unsigned>( runtime_config::btrt_random_seed ) == 0 ) {
typedef std::pair<counter_t,test_unit_id> value_type;
BOOST_TEST_FOREACH( value_type, chld, ts.m_ranked_children ) {
@@ -845,26 +845,26 @@ setup_loggers()
BOOST_TEST_I_TRY {
#ifdef BOOST_TEST_SUPPORT_TOKEN_ITERATOR
- bool has_combined_logger = runtime_config::has( runtime_config::COMBINED_LOGGER )
- && !runtime_config::get< std::vector<std::string> >( runtime_config::COMBINED_LOGGER ).empty();
+ bool has_combined_logger = runtime_config::has( runtime_config::btrt_combined_logger )
+ && !runtime_config::get< std::vector<std::string> >( runtime_config::btrt_combined_logger ).empty();
#else
bool has_combined_logger = false;
#endif
if( !has_combined_logger ) {
- unit_test_log.set_threshold_level( runtime_config::get<log_level>( runtime_config::LOG_LEVEL ) );
- const output_format format = runtime_config::get<output_format>( runtime_config::LOG_FORMAT );
+ unit_test_log.set_threshold_level( runtime_config::get<log_level>( runtime_config::btrt_log_level ) );
+ const output_format format = runtime_config::get<output_format>( runtime_config::btrt_log_format );
unit_test_log.set_format( format );
runtime_config::stream_holder& stream_logger = s_frk_state().m_log_sinks[format];
- if( runtime_config::has( runtime_config::LOG_SINK ) )
- stream_logger.setup( runtime_config::get<std::string>( runtime_config::LOG_SINK ) );
+ if( runtime_config::has( runtime_config::btrt_log_sink ) )
+ stream_logger.setup( runtime_config::get<std::string>( runtime_config::btrt_log_sink ) );
unit_test_log.set_stream( stream_logger.ref() );
}
else
{
- const std::vector<std::string>& v_output_format = runtime_config::get< std::vector<std::string> >( runtime_config::COMBINED_LOGGER ) ;
+ const std::vector<std::string>& v_output_format = runtime_config::get< std::vector<std::string> >( runtime_config::btrt_combined_logger ) ;
static const std::pair<const char*, log_level> all_log_levels[] = {
std::make_pair( "all" , log_successful_tests ),
@@ -1033,26 +1033,26 @@ init( init_unit_test_func init_func, int argc, char* argv[] )
impl::setup_loggers();
// 30. Set the desired report level, format and sink
- results_reporter::set_level( runtime_config::get<report_level>( runtime_config::REPORT_LEVEL ) );
- results_reporter::set_format( runtime_config::get<output_format>( runtime_config::REPORT_FORMAT ) );
+ results_reporter::set_level( runtime_config::get<report_level>( runtime_config::btrt_report_level ) );
+ results_reporter::set_format( runtime_config::get<output_format>( runtime_config::btrt_report_format ) );
- if( runtime_config::has( runtime_config::REPORT_SINK ) )
- s_frk_state().m_report_sink.setup( runtime_config::get<std::string>( runtime_config::REPORT_SINK ) );
+ if( runtime_config::has( runtime_config::btrt_report_sink ) )
+ s_frk_state().m_report_sink.setup( runtime_config::get<std::string>( runtime_config::btrt_report_sink ) );
results_reporter::set_stream( s_frk_state().m_report_sink.ref() );
// 40. Register default test observers
register_observer( results_collector );
register_observer( unit_test_log );
- if( runtime_config::get<bool>( runtime_config::SHOW_PROGRESS ) ) {
+ if( runtime_config::get<bool>( runtime_config::btrt_show_progress ) ) {
progress_monitor.set_stream( std::cout ); // defaults to stdout
register_observer( progress_monitor );
}
// 50. Set up memory leak detection
- unsigned long detect_mem_leak = runtime_config::get<unsigned long>( runtime_config::DETECT_MEM_LEAKS );
+ unsigned long detect_mem_leak = runtime_config::get<unsigned long>( runtime_config::btrt_detect_mem_leaks );
if( detect_mem_leak > 0 ) {
- debug::detect_memory_leaks( true, runtime_config::get<std::string>( runtime_config::REPORT_MEM_LEAKS ) );
+ debug::detect_memory_leaks( true, runtime_config::get<std::string>( runtime_config::btrt_report_mem_leaks ) );
debug::break_memory_alloc( (long)detect_mem_leak );
}
@@ -1408,7 +1408,7 @@ run( test_unit_id id, bool continue_test )
test_case_counter tcc;
traverse_test_tree( id, tcc );
- BOOST_TEST_SETUP_ASSERT( tcc.p_count != 0 , runtime_config::get<std::vector<std::string> >( runtime_config::RUN_FILTERS ).empty()
+ BOOST_TEST_SETUP_ASSERT( tcc.p_count != 0 , runtime_config::get<std::vector<std::string> >( runtime_config::btrt_run_filters ).empty()
? BOOST_TEST_L( "test tree is empty" )
: BOOST_TEST_L( "no test cases matching filter or all test cases were disabled" ) );
@@ -1428,7 +1428,7 @@ run( test_unit_id id, bool continue_test )
}
}
- unsigned seed = runtime_config::get<unsigned>( runtime_config::RANDOM_SEED );
+ unsigned seed = runtime_config::get<unsigned>( runtime_config::btrt_random_seed );
switch( seed ) {
case 0:
break;
diff --git a/boost/test/impl/junit_log_formatter.ipp b/boost/test/impl/junit_log_formatter.ipp
index a07ee5e2b0..dd528bc903 100644
--- a/boost/test/impl/junit_log_formatter.ipp
+++ b/boost/test/impl/junit_log_formatter.ipp
@@ -90,7 +90,7 @@ junit_log_formatter::log_start( std::ostream& ostr, counter_t test_cases_amount)
{
map_tests.clear();
list_path_to_root.clear();
- root_id = INV_TEST_UNIT_ID;
+ runner_log_entry.clear();
}
//____________________________________________________________________________//
@@ -101,10 +101,12 @@ public:
std::ostream& stream,
test_unit const& ts,
junit_log_formatter::map_trace_t const& mt,
+ junit_impl::junit_log_helper const& runner_log_,
bool display_build_info )
: m_stream(stream)
, m_ts( ts )
, m_map_test( mt )
+ , runner_log( runner_log_ )
, m_id( 0 )
, m_display_build_info(display_build_info)
{ }
@@ -126,46 +128,71 @@ public:
m_stream << "</" << entry_type << ">";
}
- void visit( test_case const& tc )
+ struct conditional_cdata_helper {
+ std::ostream &ostr;
+ std::string const field;
+ bool empty;
+
+ conditional_cdata_helper(std::ostream &ostr_, std::string field_)
+ : ostr(ostr_)
+ , field(field_)
+ , empty(true)
+ {}
+
+ ~conditional_cdata_helper() {
+ if(!empty) {
+ ostr << BOOST_TEST_L( "]]>" ) << "</" << field << '>' << std::endl;
+ }
+ }
+
+ void operator()(const std::string& s) {
+ bool current_empty = s.empty();
+ if(empty) {
+ if(!current_empty) {
+ empty = false;
+ ostr << '<' << field << '>' << BOOST_TEST_L( "<![CDATA[" );
+ }
+ }
+ if(!current_empty) {
+ ostr << s;
+ }
+ }
+ };
+
+ std::list<std::string> build_skipping_chain(test_case const & tc) const
{
- test_results const& tr = results_collector.results( tc.p_id );
+ // we enter here because we know that the tc has been skipped.
+ // either junit has not seen this tc, or it is indicated as disabled
+ assert(m_map_test.count(tc.p_id) == 0 || results_collector.results( tc.p_id ).p_skipped);
- junit_impl::junit_log_helper detailed_log;
- bool need_skipping_reason = false;
- bool skipped = false;
+ std::list<std::string> out;
- junit_log_formatter::map_trace_t::const_iterator it_element(m_map_test.find(tc.p_id));
- if( it_element != m_map_test.end() )
- {
- detailed_log = it_element->second;
+ test_unit_id id(tc.p_id);
+ while( id != m_ts.p_id && id != INV_TEST_UNIT_ID) {
+ test_unit const& tu = boost::unit_test::framework::get( id, TUT_ANY );
+ out.push_back("- disabled test unit: '" + tu.full_name() + "'\n");
+ if(m_map_test.count(id) > 0)
+ {
+ // junit has seen the reason: this is enough for constructing the chain
+ break;
+ }
+ id = tu.p_parent_id;
}
- else
+ junit_log_formatter::map_trace_t::const_iterator it_element_stack(m_map_test.find(id));
+ if( it_element_stack != m_map_test.end() )
{
- need_skipping_reason = true;
+ out.push_back("- reason: '" + it_element_stack->second.skipping_reason + "'");
+ out.push_front("Test case disabled because of the following chain of decision:\n");
}
+ return out;
+ }
+
+ std::string get_class_name(test_case const & tc) const {
std::string classname;
test_unit_id id(tc.p_parent_id);
- while( id != m_ts.p_id ) {
+ while( id != m_ts.p_id && id != INV_TEST_UNIT_ID ) {
test_unit const& tu = boost::unit_test::framework::get( id, TUT_ANY );
-
- if(need_skipping_reason)
- {
- test_results const& tr_parent = results_collector.results( id );
- if( tr_parent.p_skipped )
- {
- skipped = true;
- detailed_log.system_out+= "- disabled: " + tu.full_name() + "\n";
- }
- junit_log_formatter::map_trace_t::const_iterator it_element_stack(m_map_test.find(id));
- if( it_element_stack != m_map_test.end() )
- {
- detailed_log.system_out+= "- skipping decision: '" + it_element_stack->second.system_out + "'";
- detailed_log.system_out = "SKIPPING decision stack:\n" + detailed_log.system_out;
- need_skipping_reason = false;
- }
- }
-
classname = tu_name_normalize(tu.p_name) + "." + classname;
id = tu.p_parent_id;
}
@@ -175,23 +202,118 @@ public:
classname.erase(classname.size()-1);
}
+ return classname;
+ }
+
+ void write_testcase_header(test_case const & tc,
+ test_results const *tr = 0) const
+ {
//
// test case header
// total number of assertions
- m_stream << "<testcase assertions" << utils::attr_value() << tr.p_assertions_passed + tr.p_assertions_failed;
+ m_stream << "<testcase assertions" << utils::attr_value() << tr->p_assertions_passed + tr->p_assertions_failed;
// class name
+ const std::string classname = get_class_name(tc);
if(!classname.empty())
m_stream << " classname" << utils::attr_value() << classname;
// test case name and time taken
m_stream
<< " name" << utils::attr_value() << tu_name_normalize(tc.p_name)
- << " time" << utils::attr_value() << double(tr.p_duration_microseconds) * 1E-6
+ << " time" << utils::attr_value() << double(tr->p_duration_microseconds) * 1E-6
<< ">" << std::endl;
+ }
- if( tr.p_skipped || skipped ) {
+ void write_testcase_system_out(junit_impl::junit_log_helper const &detailed_log,
+ test_case const * tc,
+ bool skipped,
+ test_results const *tr = 0) const
+ {
+ // system-out + all info/messages, the object skips the empty entries
+ conditional_cdata_helper system_out_helper(m_stream, "system-out");
+
+ // indicate why the test has been skipped first
+ if( skipped ) {
+ std::list<std::string> skipping_decision_chain = build_skipping_chain(*tc);
+ for(std::list<std::string>::const_iterator it(skipping_decision_chain.begin()), ite(skipping_decision_chain.end());
+ it != ite;
+ ++it)
+ {
+ system_out_helper(*it);
+ }
+ }
+
+ // stdout
+ for(std::list<std::string>::const_iterator it(detailed_log.system_out.begin()), ite(detailed_log.system_out.end());
+ it != ite;
+ ++it)
+ {
+ system_out_helper(*it);
+ }
+
+ // warning/info message last
+ for(std::vector< junit_impl::junit_log_helper::assertion_entry >::const_iterator it(detailed_log.assertion_entries.begin());
+ it != detailed_log.assertion_entries.end();
+ ++it)
+ {
+ if(it->log_entry != junit_impl::junit_log_helper::assertion_entry::log_entry_info)
+ continue;
+ system_out_helper(it->output);
+ }
+ }
+
+ void write_testcase_system_err(junit_impl::junit_log_helper const &detailed_log,
+ test_case const * tc,
+ test_results const *tr = 0) const
+ {
+ // system-err output + test case informations
+ bool has_failed = (tr != 0) ? !tr->passed() : false;
+ if(!detailed_log.system_err.empty() || has_failed)
+ {
+ conditional_cdata_helper system_err_helper(m_stream, "system-err");
+ std::ostringstream o;
+ if(has_failed) {
+ o << "Failures detected in:" << std::endl;
+ }
+ else {
+ o << "ERROR STREAM:" << std::endl;
+ }
+
+ o << "- test case: " << tc->full_name() << std::endl;
+ if(!tc->p_description.value.empty())
+ o << " '" << tc->p_description << "'";
+
+ o << std::endl
+ << "- file: " << file_basename(tc->p_file_name) << std::endl
+ << "- line: " << tc->p_line_num << std::endl
+ ;
+
+ if(!detailed_log.system_err.empty())
+ o << std::endl << "STDERR BEGIN: ------------" << std::endl;
+
+ system_err_helper(o.str());
+ for(std::list<std::string>::const_iterator it(detailed_log.system_err.begin()), ite(detailed_log.system_err.end());
+ it != ite;
+ ++it)
+ {
+ system_err_helper(*it);
+ }
+
+ if(!detailed_log.system_err.empty())
+ o << std::endl << "STDERR END ------------" << std::endl;
+ }
+ }
+
+ void output_detailed_logs(junit_impl::junit_log_helper const &detailed_log,
+ test_case const & tc,
+ bool skipped,
+ test_results const *tr = 0) const
+ {
+ write_testcase_header(tc, tr);
+
+ if( skipped ) {
m_stream << "<skipped/>" << std::endl;
}
else {
@@ -209,45 +331,24 @@ public:
}
}
- // system-out + all info/messages
- std::string system_out = detailed_log.system_out;
- for(std::vector< junit_impl::junit_log_helper::assertion_entry >::const_iterator it(detailed_log.assertion_entries.begin());
- it != detailed_log.assertion_entries.end();
- ++it)
- {
- if(it->log_entry != junit_impl::junit_log_helper::assertion_entry::log_entry_info)
- continue;
- system_out += it->output;
- }
+ write_testcase_system_out(detailed_log, &tc, skipped, tr);
+ write_testcase_system_err(detailed_log, &tc, tr);
+ m_stream << "</testcase>" << std::endl;
+ }
- if(!system_out.empty()) {
- m_stream
- << "<system-out>"
- << utils::cdata() << system_out
- << "</system-out>"
- << std::endl;
- }
+ void visit( test_case const& tc )
+ {
- // system-err output + test case informations
- std::string system_err = detailed_log.system_err;
+ test_results const& tr = results_collector.results( tc.p_id );
+ junit_log_formatter::map_trace_t::const_iterator it_find = m_map_test.find(tc.p_id);
+ if(it_find == m_map_test.end())
{
- // test case information (redundant but useful)
- std::ostringstream o;
- o << "Test case:" << std::endl
- << "- name: " << tc.full_name() << std::endl
- << "- description: '" << tc.p_description << "'" << std::endl
- << "- file: " << file_basename(tc.p_file_name) << std::endl
- << "- line: " << tc.p_line_num << std::endl
- ;
- system_err = o.str() + system_err;
+ // test has been skipped and not seen by the logger
+ output_detailed_logs(junit_impl::junit_log_helper(), tc, true, &tr);
+ }
+ else {
+ output_detailed_logs(it_find->second, tc, tr.p_skipped, &tr);
}
- m_stream
- << "<system-err>"
- << utils::cdata() << system_err
- << "</system-err>"
- << std::endl;
-
- m_stream << "</testcase>" << std::endl;
}
bool test_suite_start( test_suite const& ts )
@@ -257,8 +358,6 @@ public:
return true;
test_results const& tr = results_collector.results( ts.p_id );
-
- m_stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" << std::endl;
m_stream << "<testsuite";
m_stream
@@ -292,6 +391,10 @@ public:
{
if( m_ts.p_id != ts.p_id )
return;
+
+ write_testcase_system_out(runner_log, 0, false, 0);
+ write_testcase_system_err(runner_log, 0, 0);
+
m_stream << "</testsuite>";
}
@@ -300,6 +403,7 @@ private:
std::ostream& m_stream;
test_unit const& m_ts;
junit_log_formatter::map_trace_t const& m_map_test;
+ junit_impl::junit_log_helper const& runner_log;
size_t m_id;
bool m_display_build_info;
};
@@ -309,9 +413,26 @@ private:
void
junit_log_formatter::log_finish( std::ostream& ostr )
{
- junit_result_helper ch( ostr, boost::unit_test::framework::get( root_id, TUT_SUITE ), map_tests, m_display_build_info );
- traverse_test_tree( root_id, ch, true ); // last is to ignore disabled suite special handling
+ ostr << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" << std::endl;
+
+ // getting the root test suite
+ if(!map_tests.empty()) {
+ test_unit* root = &boost::unit_test::framework::get( map_tests.begin()->first, TUT_ANY );
+ // looking for the root of the SUBtree (we stay in the subtree)
+ while(root->p_parent_id != INV_TEST_UNIT_ID && map_tests.count(root->p_parent_id) > 0) {
+ root = &boost::unit_test::framework::get( root->p_parent_id, TUT_ANY );
+ }
+ junit_result_helper ch( ostr, *root, map_tests, this->runner_log_entry, m_display_build_info );
+ traverse_test_tree( root->p_id, ch, true ); // last is to ignore disabled suite special handling
+ }
+ else {
+ ostr << "<testsuites errors=\"1\">";
+ ostr << "<testsuite errors=\"1\" name=\"boost-test-framework\">";
+ ostr << "<testcase assertions=\"1\" name=\"test-setup\">";
+ ostr << "<system-out>Incorrect setup: no test case executed</system-out>";
+ ostr << "</testcase></testsuite></testsuites>";
+ }
return;
}
@@ -328,8 +449,6 @@ junit_log_formatter::log_build_info( std::ostream& ostr )
void
junit_log_formatter::test_unit_start( std::ostream& ostr, test_unit const& tu )
{
- if(list_path_to_root.empty())
- root_id = tu.p_id;
list_path_to_root.push_back( tu.p_id );
map_tests.insert(std::make_pair(tu.p_id, junit_impl::junit_log_helper())); // current_test_case_id not working here
}
@@ -358,16 +477,11 @@ junit_log_formatter::test_unit_aborted( std::ostream& os, test_unit const& tu )
void
junit_log_formatter::test_unit_skipped( std::ostream& ostr, test_unit const& tu, const_string reason )
{
- if(tu.p_type == TUT_CASE)
- {
- junit_impl::junit_log_helper& v = map_tests[tu.p_id];
- v.system_out.assign(reason.begin(), reason.end());
- }
- else
- {
- junit_impl::junit_log_helper& v = map_tests[tu.p_id];
- v.system_out.assign(reason.begin(), reason.end());
- }
+ // if a test unit is skipped, then the start of this TU has not been called yet.
+ // we cannot use get_current_log_entry here, but the TU id should appear in the map.
+ // The "skip" boolean is given by the boost.test framework
+ junit_impl::junit_log_helper& v = map_tests[tu.p_id]; // not sure if we can use get_current_log_entry()
+ v.skipping_reason.assign(reason.begin(), reason.end());
}
//____________________________________________________________________________//
@@ -380,67 +494,62 @@ junit_log_formatter::log_exception_start( std::ostream& ostr, log_checkpoint_dat
m_is_last_assertion_or_error = false;
- if(!list_path_to_root.empty())
- {
- junit_impl::junit_log_helper& last_entry = map_tests[list_path_to_root.back()];
-
- junit_impl::junit_log_helper::assertion_entry entry;
+ junit_impl::junit_log_helper& last_entry = get_current_log_entry();
- entry.logentry_message = "unexpected exception";
- entry.log_entry = junit_impl::junit_log_helper::assertion_entry::log_entry_error;
+ junit_impl::junit_log_helper::assertion_entry entry;
- switch(ex.code())
- {
- case execution_exception::cpp_exception_error:
- entry.logentry_type = "uncaught exception";
- break;
- case execution_exception::timeout_error:
- entry.logentry_type = "execution timeout";
- break;
- case execution_exception::user_error:
- entry.logentry_type = "user, assert() or CRT error";
- break;
- case execution_exception::user_fatal_error:
- // Looks like never used
- entry.logentry_type = "user fatal error";
- break;
- case execution_exception::system_error:
- entry.logentry_type = "system error";
- break;
- case execution_exception::system_fatal_error:
- entry.logentry_type = "system fatal error";
- break;
- default:
- entry.logentry_type = "no error"; // not sure how to handle this one
- break;
- }
+ entry.logentry_message = "unexpected exception";
+ entry.log_entry = junit_impl::junit_log_helper::assertion_entry::log_entry_error;
- o << "UNCAUGHT EXCEPTION:" << std::endl;
- if( !loc.m_function.is_empty() )
- o << "- function: \"" << loc.m_function << "\"" << std::endl;
-
- o << "- file: " << file_basename(loc.m_file_name) << std::endl
- << "- line: " << loc.m_line_num << std::endl
- << std::endl;
+ switch(ex.code())
+ {
+ case execution_exception::cpp_exception_error:
+ entry.logentry_type = "uncaught exception";
+ break;
+ case execution_exception::timeout_error:
+ entry.logentry_type = "execution timeout";
+ break;
+ case execution_exception::user_error:
+ entry.logentry_type = "user, assert() or CRT error";
+ break;
+ case execution_exception::user_fatal_error:
+ // Looks like never used
+ entry.logentry_type = "user fatal error";
+ break;
+ case execution_exception::system_error:
+ entry.logentry_type = "system error";
+ break;
+ case execution_exception::system_fatal_error:
+ entry.logentry_type = "system fatal error";
+ break;
+ default:
+ entry.logentry_type = "no error"; // not sure how to handle this one
+ break;
+ }
- o << "\nEXCEPTION STACK TRACE: --------------\n" << ex.what()
- << "\n-------------------------------------";
+ o << "UNCAUGHT EXCEPTION:" << std::endl;
+ if( !loc.m_function.is_empty() )
+ o << "- function: \"" << loc.m_function << "\"" << std::endl;
- if( !checkpoint_data.m_file_name.is_empty() ) {
- o << std::endl << std::endl
- << "Last checkpoint:" << std::endl
- << "- message: \"" << checkpoint_data.m_message << "\"" << std::endl
- << "- file: " << file_basename(checkpoint_data.m_file_name) << std::endl
- << "- line: " << checkpoint_data.m_line_num << std::endl
- ;
- }
+ o << "- file: " << file_basename(loc.m_file_name) << std::endl
+ << "- line: " << loc.m_line_num << std::endl
+ << std::endl;
- entry.output = o.str();
+ o << "\nEXCEPTION STACK TRACE: --------------\n" << ex.what()
+ << "\n-------------------------------------";
- last_entry.assertion_entries.push_back(entry);
+ if( !checkpoint_data.m_file_name.is_empty() ) {
+ o << std::endl << std::endl
+ << "Last checkpoint:" << std::endl
+ << "- message: \"" << checkpoint_data.m_message << "\"" << std::endl
+ << "- file: " << file_basename(checkpoint_data.m_file_name) << std::endl
+ << "- line: " << checkpoint_data.m_line_num << std::endl
+ ;
}
- // check what to do with this one
+ entry.output = o.str();
+
+ last_entry.assertion_entries.push_back(entry);
}
//____________________________________________________________________________//
@@ -449,8 +558,8 @@ void
junit_log_formatter::log_exception_finish( std::ostream& ostr )
{
// sealing the last entry
- assert(!map_tests[list_path_to_root.back()].assertion_entries.back().sealed);
- map_tests[list_path_to_root.back()].assertion_entries.back().sealed = true;
+ assert(!get_current_log_entry().assertion_entries.back().sealed);
+ get_current_log_entry().assertion_entries.back().sealed = true;
}
//____________________________________________________________________________//
@@ -458,17 +567,36 @@ junit_log_formatter::log_exception_finish( std::ostream& ostr )
void
junit_log_formatter::log_entry_start( std::ostream& ostr, log_entry_data const& entry_data, log_entry_types let )
{
- junit_impl::junit_log_helper& last_entry = map_tests[list_path_to_root.back()];
+ junit_impl::junit_log_helper& last_entry = get_current_log_entry();
+ last_entry.skipping = false;
m_is_last_assertion_or_error = true;
switch(let)
{
case unit_test_log_formatter::BOOST_UTL_ET_INFO:
+ {
+ if(m_log_level_internal > log_successful_tests) {
+ last_entry.skipping = true;
+ break;
+ }
+ // no break on purpose
+ }
case unit_test_log_formatter::BOOST_UTL_ET_MESSAGE:
+ {
+ if(m_log_level_internal > log_messages) {
+ last_entry.skipping = true;
+ break;
+ }
+ // no break on purpose
+ }
case unit_test_log_formatter::BOOST_UTL_ET_WARNING:
{
+ if(m_log_level_internal > log_warnings) {
+ last_entry.skipping = true;
+ break;
+ }
std::ostringstream o;
-
junit_impl::junit_log_helper::assertion_entry entry;
+
entry.log_entry = junit_impl::junit_log_helper::assertion_entry::log_entry_info;
entry.logentry_message = "info";
entry.logentry_type = "message";
@@ -505,22 +633,18 @@ junit_log_formatter::log_entry_start( std::ostream& ostr, log_entry_data const&
break;
}
}
-
}
- //____________________________________________________________________________//
-
-
-
//____________________________________________________________________________//
void
junit_log_formatter::log_entry_value( std::ostream& ostr, const_string value )
{
- assert(map_tests[list_path_to_root.back()].assertion_entries.empty() || !map_tests[list_path_to_root.back()].assertion_entries.back().sealed);
- junit_impl::junit_log_helper& last_entry = map_tests[list_path_to_root.back()];
- std::ostringstream o;
- utils::print_escaped_cdata( o, value );
+ junit_impl::junit_log_helper& last_entry = get_current_log_entry();
+ if(last_entry.skipping)
+ return;
+
+ assert(last_entry.assertion_entries.empty() || !last_entry.assertion_entries.back().sealed);
if(!last_entry.assertion_entries.empty())
{
@@ -531,7 +655,7 @@ junit_log_formatter::log_entry_value( std::ostream& ostr, const_string value )
{
// this may be a message coming from another observer
// the prefix is set in the log_entry_start
- last_entry.system_out += value;
+ last_entry.system_out.push_back(std::string(value.begin(), value.end()));
}
}
@@ -540,16 +664,22 @@ junit_log_formatter::log_entry_value( std::ostream& ostr, const_string value )
void
junit_log_formatter::log_entry_finish( std::ostream& ostr )
{
- assert(map_tests[list_path_to_root.back()].assertion_entries.empty() || !map_tests[list_path_to_root.back()].assertion_entries.back().sealed);
- junit_impl::junit_log_helper& last_entry = map_tests[list_path_to_root.back()];
- if(!last_entry.assertion_entries.empty()) {
- junit_impl::junit_log_helper::assertion_entry& log_entry = last_entry.assertion_entries.back();
- log_entry.output += "\n\n"; // quote end, CR
- log_entry.sealed = true;
- }
- else {
- last_entry.system_out += "\n\n"; // quote end, CR
+ junit_impl::junit_log_helper& last_entry = get_current_log_entry();
+ if(!last_entry.skipping)
+ {
+ assert(last_entry.assertion_entries.empty() || !last_entry.assertion_entries.back().sealed);
+
+ if(!last_entry.assertion_entries.empty()) {
+ junit_impl::junit_log_helper::assertion_entry& log_entry = last_entry.assertion_entries.back();
+ log_entry.output += "\n\n"; // quote end, CR
+ log_entry.sealed = true;
+ }
+ else {
+ last_entry.system_out.push_back("\n\n"); // quote end, CR
+ }
}
+
+ last_entry.skipping = false;
}
//____________________________________________________________________________//
@@ -557,16 +687,21 @@ junit_log_formatter::log_entry_finish( std::ostream& ostr )
void
junit_log_formatter::entry_context_start( std::ostream& ostr, log_level )
{
- std::vector< junit_impl::junit_log_helper::assertion_entry > &v_failure_or_error = map_tests[list_path_to_root.back()].assertion_entries;
+ junit_impl::junit_log_helper& last_entry = get_current_log_entry();
+ if(last_entry.skipping)
+ return;
+
+ std::vector< junit_impl::junit_log_helper::assertion_entry > &v_failure_or_error = last_entry.assertion_entries;
assert(!v_failure_or_error.back().sealed);
+ junit_impl::junit_log_helper::assertion_entry& last_log_entry = v_failure_or_error.back();
if(m_is_last_assertion_or_error)
{
- v_failure_or_error.back().output += "\n- context:\n";
+ last_log_entry.output += "\n- context:\n";
}
else
{
- v_failure_or_error.back().output += "\n\nCONTEXT:\n";
+ last_log_entry.output += "\n\nCONTEXT:\n";
}
}
@@ -576,7 +711,10 @@ void
junit_log_formatter::entry_context_finish( std::ostream& ostr )
{
// no op, may be removed
- assert(!map_tests[list_path_to_root.back()].assertion_entries.back().sealed);
+ junit_impl::junit_log_helper& last_entry = get_current_log_entry();
+ if(last_entry.skipping)
+ return;
+ assert(!get_current_log_entry().assertion_entries.back().sealed);
}
//____________________________________________________________________________//
@@ -584,8 +722,15 @@ junit_log_formatter::entry_context_finish( std::ostream& ostr )
void
junit_log_formatter::log_entry_context( std::ostream& ostr, const_string context_descr )
{
- assert(!map_tests[list_path_to_root.back()].assertion_entries.back().sealed);
- map_tests[list_path_to_root.back()].assertion_entries.back().output += (m_is_last_assertion_or_error ? " - '": "- '") + std::string(context_descr.begin(), context_descr.end()) + "'\n"; // quote end
+ junit_impl::junit_log_helper& last_entry = get_current_log_entry();
+ if(last_entry.skipping)
+ return;
+
+ assert(!last_entry.assertion_entries.back().sealed);
+ junit_impl::junit_log_helper::assertion_entry& last_log_entry = get_current_log_entry().assertion_entries.back();
+
+ last_log_entry.output +=
+ (m_is_last_assertion_or_error ? " - '": "- '") + std::string(context_descr.begin(), context_descr.end()) + "'\n"; // quote end
}
//____________________________________________________________________________//
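
The conditional_cdata_helper introduced above is a small RAII writer: it opens "<tag><![CDATA[" lazily on the first non-empty string it is fed, and its destructor closes the element only if something was actually written, so empty system-out/system-err sections disappear from the JUnit file entirely. A self-contained illustration of the same idiom; the names here are made up, only the pattern mirrors the patch:

    #include <iostream>
    #include <string>

    struct lazy_cdata_writer {
        std::ostream& os;
        std::string   tag;
        bool          empty;

        lazy_cdata_writer( std::ostream& os_, std::string tag_ )
            : os( os_ ), tag( tag_ ), empty( true ) {}

        ~lazy_cdata_writer() {
            if( !empty )
                os << "]]></" << tag << ">\n";    // close only if opened
        }

        void operator()( const std::string& s ) {
            if( s.empty() )
                return;
            if( empty ) {                         // first non-empty write opens the element
                os << '<' << tag << "><![CDATA[";
                empty = false;
            }
            os << s;
        }
    };

    int main() {
        { lazy_cdata_writer w( std::cout, "system-err" ); }            // emits nothing
        { lazy_cdata_writer w( std::cout, "system-out" ); w( "hi" ); } // <system-out><![CDATA[hi]]></system-out>
    }
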
diff --git a/boost/test/impl/plain_report_formatter.ipp b/boost/test/impl/plain_report_formatter.ipp
index 262083eeaa..cbf5a4c029 100644
--- a/boost/test/impl/plain_report_formatter.ipp
+++ b/boost/test/impl/plain_report_formatter.ipp
@@ -83,7 +83,7 @@ void
plain_report_formatter::results_report_start( std::ostream& ostr )
{
m_indent = 0;
- m_color_output = runtime_config::get<bool>( runtime_config::COLOR_OUTPUT );
+ m_color_output = runtime_config::get<bool>( runtime_config::btrt_color_output );
ostr << '\n';
}
diff --git a/boost/test/impl/progress_monitor.ipp b/boost/test/impl/progress_monitor.ipp
index 7fb3baf8f0..34149745cf 100644
--- a/boost/test/impl/progress_monitor.ipp
+++ b/boost/test/impl/progress_monitor.ipp
@@ -124,7 +124,7 @@ progress_monitor_impl& s_pm_impl() { static progress_monitor_impl the_inst; retu
void
progress_monitor_t::test_start( counter_t test_cases_amount )
{
- s_pm_impl().m_color_output = runtime_config::get<bool>( runtime_config::COLOR_OUTPUT );
+ s_pm_impl().m_color_output = runtime_config::get<bool>( runtime_config::btrt_color_output );
PM_SCOPED_COLOR();
diff --git a/boost/test/impl/test_tools.ipp b/boost/test/impl/test_tools.ipp
index a6b20a7729..853b3913ee 100644
--- a/boost/test/impl/test_tools.ipp
+++ b/boost/test/impl/test_tools.ipp
@@ -68,6 +68,12 @@ namespace tt_detail {
// ************************************************************************** //
void
+print_log_value<bool>::operator()( std::ostream& ostr, bool t )
+{
+ ostr << std::boolalpha << t;
+}
+
+void
print_log_value<char>::operator()( std::ostream& ostr, char t )
{
if( (std::isprint)( static_cast<unsigned char>(t) ) )
@@ -113,6 +119,14 @@ print_log_value<wchar_t const*>::operator()( std::ostream& ostr, wchar_t const*
ostr << ( t ? t : L"null string" );
}
+#if !defined(BOOST_NO_CXX11_NULLPTR)
+void
+print_log_value<std::nullptr_t>::operator()( std::ostream& ostr, std::nullptr_t p )
+{
+ ostr << "nullptr";
+}
+#endif
+
//____________________________________________________________________________//
// ************************************************************************** //
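
The two additions above give bool and std::nullptr_t their own print_log_value specializations, so assertion logs print booleans as "true"/"false" and a null pointer literal as "nullptr". A hedged sketch of a test module exercising them; the module name is made up and the logged text is illustrative only:

    #define BOOST_TEST_MODULE print_log_value_demo
    #include <boost/test/included/unit_test.hpp>

    BOOST_AUTO_TEST_CASE( bool_and_nullptr_are_printed_readably )
    {
        bool flag = true;
        BOOST_TEST( flag );            // a failure here would be logged as "false", not 0

    #if !defined(BOOST_NO_CXX11_NULLPTR)
        const char* p = 0;
        BOOST_TEST( p == nullptr );    // the nullptr operand is logged as "nullptr"
    #endif
    }
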
diff --git a/boost/test/impl/unit_test_log.ipp b/boost/test/impl/unit_test_log.ipp
index 5f3fa6510d..c52260b94b 100644
--- a/boost/test/impl/unit_test_log.ipp
+++ b/boost/test/impl/unit_test_log.ipp
@@ -168,7 +168,7 @@ unit_test_log_t::test_start( counter_t test_cases_amount )
current_logger_data.m_log_formatter->log_start( current_logger_data.stream(), test_cases_amount );
- if( runtime_config::get<bool>( runtime_config::BUILD_INFO ) )
+ if( runtime_config::get<bool>( runtime_config::btrt_build_info ) )
current_logger_data.m_log_formatter->log_build_info( current_logger_data.stream() );
current_logger_data.m_entry_in_progress = false;
@@ -442,8 +442,11 @@ unit_test_log_t&
unit_test_log_t::operator<<( lazy_ostream const& value )
{
BOOST_TEST_FOREACH( unit_test_log_data_helper_impl&, current_logger_data, s_log_impl().m_log_formatter_data ) {
- if( current_logger_data.m_enabled && s_log_impl().m_entry_data.m_level >= current_logger_data.get_log_level() && !value.empty() && log_entry_start(current_logger_data.m_format) )
- current_logger_data.m_log_formatter->log_entry_value( current_logger_data.stream(), value );
+ if( current_logger_data.m_enabled && s_log_impl().m_entry_data.m_level >= current_logger_data.get_log_level() && !value.empty() ) {
+ if( log_entry_start(current_logger_data.m_format) ) {
+ current_logger_data.m_log_formatter->log_entry_value( current_logger_data.stream(), value );
+ }
+ }
}
return *this;
}
diff --git a/boost/test/impl/unit_test_main.ipp b/boost/test/impl/unit_test_main.ipp
index db61930652..dabe328c8c 100644
--- a/boost/test/impl/unit_test_main.ipp
+++ b/boost/test/impl/unit_test_main.ipp
@@ -188,7 +188,7 @@ unit_test_main( init_unit_test_func init_func, int argc, char* argv[] )
BOOST_TEST_I_TRY {
framework::init( init_func, argc, argv );
- if( runtime_config::get<bool>( runtime_config::WAIT_FOR_DEBUGGER ) ) {
+ if( runtime_config::get<bool>( runtime_config::btrt_wait_for_debugger ) ) {
results_reporter::get_stream() << "Press any key to continue..." << std::endl;
// getchar is defined as a macro in uClibc. Use parenthesis to fix
@@ -199,7 +199,7 @@ unit_test_main( init_unit_test_func init_func, int argc, char* argv[] )
framework::finalize_setup_phase();
- output_format list_cont = runtime_config::get<output_format>( runtime_config::LIST_CONTENT );
+ output_format list_cont = runtime_config::get<output_format>( runtime_config::btrt_list_content );
if( list_cont != unit_test::OF_INVALID ) {
if( list_cont == unit_test::OF_DOT ) {
ut_detail::dot_content_reporter reporter( results_reporter::get_stream() );
@@ -215,7 +215,7 @@ unit_test_main( init_unit_test_func init_func, int argc, char* argv[] )
return boost::exit_success;
}
- if( runtime_config::get<bool>( runtime_config::LIST_LABELS ) ) {
+ if( runtime_config::get<bool>( runtime_config::btrt_list_labels ) ) {
ut_detail::labels_collector collector;
traverse_test_tree( framework::master_test_suite().p_id, collector, true );
@@ -232,7 +232,7 @@ unit_test_main( init_unit_test_func init_func, int argc, char* argv[] )
results_reporter::make_report();
- result_code = !runtime_config::get<bool>( runtime_config::RESULT_CODE )
+ result_code = !runtime_config::get<bool>( runtime_config::btrt_result_code )
? boost::exit_success
: results_collector.results( framework::master_test_suite().p_id ).result_code();
}
diff --git a/boost/test/impl/unit_test_monitor.ipp b/boost/test/impl/unit_test_monitor.ipp
index 304c8f1c04..fdd36f7c3a 100644
--- a/boost/test/impl/unit_test_monitor.ipp
+++ b/boost/test/impl/unit_test_monitor.ipp
@@ -37,11 +37,11 @@ unit_test_monitor_t::error_level
unit_test_monitor_t::execute_and_translate( boost::function<void ()> const& func, unsigned timeout )
{
BOOST_TEST_I_TRY {
- p_catch_system_errors.value = runtime_config::get<bool>( runtime_config::CATCH_SYS_ERRORS );
+ p_catch_system_errors.value = runtime_config::get<bool>( runtime_config::btrt_catch_sys_errors );
p_timeout.value = timeout;
- p_auto_start_dbg.value = runtime_config::get<bool>( runtime_config::AUTO_START_DBG );
- p_use_alt_stack.value = runtime_config::get<bool>( runtime_config::USE_ALT_STACK );
- p_detect_fp_exceptions.value = runtime_config::get<bool>( runtime_config::DETECT_FP_EXCEPT );
+ p_auto_start_dbg.value = runtime_config::get<bool>( runtime_config::btrt_auto_start_dbg );
+ p_use_alt_stack.value = runtime_config::get<bool>( runtime_config::btrt_use_alt_stack );
+ p_detect_fp_exceptions.value = runtime_config::get<bool>( runtime_config::btrt_detect_fp_except );
vexecute( func );
}
diff --git a/boost/test/impl/unit_test_parameters.ipp b/boost/test/impl/unit_test_parameters.ipp
index 315942e6c0..b825c46d6a 100644
--- a/boost/test/impl/unit_test_parameters.ipp
+++ b/boost/test/impl/unit_test_parameters.ipp
@@ -71,35 +71,35 @@ namespace rt = boost::runtime;
namespace runtime_config {
// UTF parameters
-std::string AUTO_START_DBG = "auto_start_dbg";
-std::string BREAK_EXEC_PATH = "break_exec_path";
-std::string BUILD_INFO = "build_info";
-std::string CATCH_SYS_ERRORS = "catch_system_errors";
-std::string COLOR_OUTPUT = "color_output";
-std::string DETECT_FP_EXCEPT = "detect_fp_exceptions";
-std::string DETECT_MEM_LEAKS = "detect_memory_leaks";
-std::string LIST_CONTENT = "list_content";
-std::string LIST_LABELS = "list_labels";
-std::string LOG_FORMAT = "log_format";
-std::string LOG_LEVEL = "log_level";
-std::string LOG_SINK = "log_sink";
-std::string COMBINED_LOGGER = "logger";
-std::string OUTPUT_FORMAT = "output_format";
-std::string RANDOM_SEED = "random";
-std::string REPORT_FORMAT = "report_format";
-std::string REPORT_LEVEL = "report_level";
-std::string REPORT_MEM_LEAKS = "report_memory_leaks_to";
-std::string REPORT_SINK = "report_sink";
-std::string RESULT_CODE = "result_code";
-std::string RUN_FILTERS = "run_test";
-std::string SAVE_TEST_PATTERN = "save_pattern";
-std::string SHOW_PROGRESS = "show_progress";
-std::string USE_ALT_STACK = "use_alt_stack";
-std::string WAIT_FOR_DEBUGGER = "wait_for_debugger";
-
-std::string HELP = "help";
-std::string USAGE = "usage";
-std::string VERSION = "version";
+std::string btrt_auto_start_dbg = "auto_start_dbg";
+std::string btrt_break_exec_path = "break_exec_path";
+std::string btrt_build_info = "build_info";
+std::string btrt_catch_sys_errors = "catch_system_errors";
+std::string btrt_color_output = "color_output";
+std::string btrt_detect_fp_except = "detect_fp_exceptions";
+std::string btrt_detect_mem_leaks = "detect_memory_leaks";
+std::string btrt_list_content = "list_content";
+std::string btrt_list_labels = "list_labels";
+std::string btrt_log_format = "log_format";
+std::string btrt_log_level = "log_level";
+std::string btrt_log_sink = "log_sink";
+std::string btrt_combined_logger = "logger";
+std::string btrt_output_format = "output_format";
+std::string btrt_random_seed = "random";
+std::string btrt_report_format = "report_format";
+std::string btrt_report_level = "report_level";
+std::string btrt_report_mem_leaks = "report_memory_leaks_to";
+std::string btrt_report_sink = "report_sink";
+std::string btrt_result_code = "result_code";
+std::string btrt_run_filters = "run_test";
+std::string btrt_save_test_pattern = "save_pattern";
+std::string btrt_show_progress = "show_progress";
+std::string btrt_use_alt_stack = "use_alt_stack";
+std::string btrt_wait_for_debugger = "wait_for_debugger";
+
+std::string btrt_help = "help";
+std::string btrt_usage = "usage";
+std::string btrt_version = "version";
//____________________________________________________________________________//
@@ -108,11 +108,11 @@ namespace {
void
register_parameters( rt::parameters_store& store )
{
- rt::option auto_start_dbg( AUTO_START_DBG, (
+ rt::option auto_start_dbg( btrt_auto_start_dbg, (
rt::description = "Automatically attaches debugger in case of system level failure (signal).",
rt::env_var = "BOOST_TEST_AUTO_START_DBG",
- rt::help = "Option " + AUTO_START_DBG + " specifies whether Boost.Test should attempt "
+ rt::help = "Option " + btrt_auto_start_dbg + " specifies whether Boost.Test should attempt "
"to attach a debugger when fatal system error occurs. At the moment this feature "
"is only available on a few selected platforms: Win32 and *nix. There is a "
"default debugger configured for these platforms. You can manually configure "
@@ -120,13 +120,13 @@ register_parameters( rt::parameters_store& store )
"Boost.Test debug API, specifically the function boost::debug::set_debugger."
));
- auto_start_dbg.add_cla_id( "--", AUTO_START_DBG, "=" );
+ auto_start_dbg.add_cla_id( "--", btrt_auto_start_dbg, "=" );
auto_start_dbg.add_cla_id( "-", "d", " " );
store.add( auto_start_dbg );
///////////////////////////////////////////////
- rt::parameter<std::string> break_exec_path( BREAK_EXEC_PATH, (
+ rt::parameter<std::string> break_exec_path( btrt_break_exec_path, (
rt::description = "For the exception safety testing allows to break at specific execution path.",
rt::env_var = "BOOST_TEST_BREAK_EXEC_PATH"
#ifndef BOOST_NO_CXX11_LAMBDAS
@@ -137,25 +137,25 @@ register_parameters( rt::parameters_store& store )
#endif
));
- break_exec_path.add_cla_id( "--", BREAK_EXEC_PATH, "=" );
+ break_exec_path.add_cla_id( "--", btrt_break_exec_path, "=" );
store.add( break_exec_path );
///////////////////////////////////////////////
- rt::option build_info( BUILD_INFO, (
+ rt::option build_info( btrt_build_info, (
rt::description = "Displays library build information.",
rt::env_var = "BOOST_TEST_BUILD_INFO",
- rt::help = "Option " + BUILD_INFO + " displays library build information, including: platform, "
+ rt::help = "Option " + btrt_build_info + " displays library build information, including: platform, "
"compiler, STL version and Boost version."
));
- build_info.add_cla_id( "--", BUILD_INFO, "=" );
+ build_info.add_cla_id( "--", btrt_build_info, "=" );
build_info.add_cla_id( "-", "i", " " );
store.add( build_info );
///////////////////////////////////////////////
- rt::option catch_sys_errors( CATCH_SYS_ERRORS, (
+ rt::option catch_sys_errors( btrt_catch_sys_errors, (
rt::description = "Allows to switch between catching and ignoring system errors (signals).",
rt::env_var = "BOOST_TEST_CATCH_SYSTEM_ERRORS",
rt::default_value =
@@ -164,7 +164,7 @@ register_parameters( rt::parameters_store& store )
#else
true,
#endif
- rt::help = "If option " + CATCH_SYS_ERRORS + " has value no the frameworks does not attempt to catch "
+ rt::help = "If option " + btrt_catch_sys_errors + " has value no the frameworks does not attempt to catch "
"asynchronous system failure events (signals on *NIX platforms or structured exceptions on Windows). "
" Default value is "
#ifdef BOOST_TEST_DEFAULTS_TO_CORE_DUMP
@@ -174,13 +174,13 @@ register_parameters( rt::parameters_store& store )
#endif
));
- catch_sys_errors.add_cla_id( "--", CATCH_SYS_ERRORS, "=", true );
+ catch_sys_errors.add_cla_id( "--", btrt_catch_sys_errors, "=", true );
catch_sys_errors.add_cla_id( "-", "s", " " );
store.add( catch_sys_errors );
///////////////////////////////////////////////
- rt::option color_output( COLOR_OUTPUT, (
+ rt::option color_output( btrt_color_output, (
rt::description = "Enables color output of the framework log and report messages.",
rt::env_var = "BOOST_TEST_COLOR_OUTPUT",
rt::help = "The framework is able to produce color output on systems which supports it. "
@@ -188,31 +188,31 @@ register_parameters( rt::parameters_store& store )
"does not produces color output."
));
- color_output.add_cla_id( "--", COLOR_OUTPUT, "=", true );
+ color_output.add_cla_id( "--", btrt_color_output, "=", true );
color_output.add_cla_id( "-", "x", " " );
store.add( color_output );
///////////////////////////////////////////////
- rt::option detect_fp_except( DETECT_FP_EXCEPT, (
+ rt::option detect_fp_except( btrt_detect_fp_except, (
rt::description = "Enables/disables floating point exceptions traps.",
rt::env_var = "BOOST_TEST_DETECT_FP_EXCEPTIONS",
- rt::help = "Option " + DETECT_FP_EXCEPT + " enables/disables hardware traps for the floating "
+ rt::help = "Option " + btrt_detect_fp_except + " enables/disables hardware traps for the floating "
"point exceptions (if supported on your platfrom)."
));
- detect_fp_except.add_cla_id( "--", DETECT_FP_EXCEPT, "=", true );
+ detect_fp_except.add_cla_id( "--", btrt_detect_fp_except, "=", true );
store.add( detect_fp_except );
///////////////////////////////////////////////
- rt::parameter<unsigned long> detect_mem_leaks( DETECT_MEM_LEAKS, (
+ rt::parameter<unsigned long> detect_mem_leaks( btrt_detect_mem_leaks, (
rt::description = "Turns on/off memory leaks detection (optionally breaking on specified alloc order number).",
rt::env_var = "BOOST_TEST_DETECT_MEMORY_LEAK",
rt::default_value = 1L,
rt::optional_value = 1L,
rt::value_hint = "<alloc order number>",
- rt::help = "Parameter " + DETECT_MEM_LEAKS + " enables/disables memory leaks detection. "
+ rt::help = "Parameter " + btrt_detect_mem_leaks + " enables/disables memory leaks detection. "
"This parameter has optional long integer value. The default value is 1, which "
"enables the memory leak detection. The value 0 disables memory leak detection. "
"Any value N greater than 1 is treated as leak allocation number and tells the "
@@ -220,12 +220,12 @@ register_parameters( rt::parameters_store& store )
"omitted the default value is assumed."
));
- detect_mem_leaks.add_cla_id( "--", DETECT_MEM_LEAKS, "=" );
+ detect_mem_leaks.add_cla_id( "--", btrt_detect_mem_leaks, "=" );
store.add( detect_mem_leaks );
///////////////////////////////////////////////
- rt::enum_parameter<unit_test::output_format> list_content( LIST_CONTENT, (
+ rt::enum_parameter<unit_test::output_format> list_content( btrt_list_content, (
rt::description = "Lists the content of test tree - names of all test suites and test cases.",
rt::env_var = "BOOST_TEST_LIST_CONTENT",
rt::default_value = OF_INVALID,
@@ -242,30 +242,30 @@ register_parameters( rt::parameters_store& store )
( "DOT", OF_DOT )
,
#endif
- rt::help = "Parameter " + LIST_CONTENT + " instructs the framework to list the content "
+ rt::help = "Parameter " + btrt_list_content + " instructs the framework to list the content "
"of the test module instead of executing the test cases. Parameter accepts "
"optional string value indicating the format of the output. Currently the "
"framework supports two formats: human readable format (HRF) and dot graph "
"format (DOT). If value is omitted HRF value is assumed."
));
- list_content.add_cla_id( "--", LIST_CONTENT, "=" );
+ list_content.add_cla_id( "--", btrt_list_content, "=" );
store.add( list_content );
///////////////////////////////////////////////
- rt::option list_labels( LIST_LABELS, (
+ rt::option list_labels( btrt_list_labels, (
rt::description = "Lists all available labels.",
rt::env_var = "BOOST_TEST_LIST_LABELS",
- rt::help = "Option " + LIST_LABELS + " instructs the framework to list all the the labels "
+ rt::help = "Option " + btrt_list_labels + " instructs the framework to list all the the labels "
"defined in the test module instead of executing the test cases."
));
- list_labels.add_cla_id( "--", LIST_LABELS, "=" );
+ list_labels.add_cla_id( "--", btrt_list_labels, "=" );
store.add( list_labels );
///////////////////////////////////////////////
- rt::enum_parameter<unit_test::output_format> log_format( LOG_FORMAT, (
+ rt::enum_parameter<unit_test::output_format> log_format( btrt_log_format, (
rt::description = "Specifies log format.",
rt::env_var = "BOOST_TEST_LOG_FORMAT",
rt::default_value = OF_CLF,
@@ -285,7 +285,7 @@ register_parameters( rt::parameters_store& store )
( "JUNIT", OF_JUNIT )
,
#endif
- rt::help = "Parameter " + LOG_FORMAT + " allows to set the frameowrk's log format to one "
+ rt::help = "Parameter " + btrt_log_format + " allows to set the frameowrk's log format to one "
"of the formats supplied by the framework. The only acceptable values for this "
"parameter are the names of the output formats supplied by the framework. By "
"default the framework uses human readable format (HRF) for testing log. This "
@@ -293,13 +293,13 @@ register_parameters( rt::parameters_store& store )
"or JUNIT as log format, which are easier to process by testing automation tools."
));
- log_format.add_cla_id( "--", LOG_FORMAT, "=" );
+ log_format.add_cla_id( "--", btrt_log_format, "=" );
log_format.add_cla_id( "-", "f", " " );
store.add( log_format );
///////////////////////////////////////////////
- rt::enum_parameter<unit_test::log_level> log_level( LOG_LEVEL, (
+ rt::enum_parameter<unit_test::log_level> log_level( btrt_log_level, (
rt::description = "Specifies log level.",
rt::env_var = "BOOST_TEST_LOG_LEVEL",
rt::default_value = log_all_errors,
@@ -333,7 +333,7 @@ register_parameters( rt::parameters_store& store )
( "nothing" , log_nothing )
,
#endif
- rt::help = "Parameter " + LOG_LEVEL + " allows to set the framework's log level. "
+ rt::help = "Parameter " + btrt_log_level + " allows to set the framework's log level. "
"Log level defines the verbosity of testing log produced by a testing "
"module. The verbosity ranges from a complete log, when all assertions "
"(both successful and failing) are reported, all notifications about "
@@ -341,29 +341,29 @@ register_parameters( rt::parameters_store& store )
"is reported to a testing log stream."
));
- log_level.add_cla_id( "--", LOG_LEVEL, "=" );
+ log_level.add_cla_id( "--", btrt_log_level, "=" );
log_level.add_cla_id( "-", "l", " " );
store.add( log_level );
///////////////////////////////////////////////
- rt::parameter<std::string> log_sink( LOG_SINK, (
+ rt::parameter<std::string> log_sink( btrt_log_sink, (
rt::description = "Specifies log sink: stdout(default), stderr or file name.",
rt::env_var = "BOOST_TEST_LOG_SINK",
rt::value_hint = "<stderr|stdout|file name>",
- rt::help = "Parameter " + LOG_SINK + " allows to set the log sink - location "
+ rt::help = "Parameter " + btrt_log_sink + " allows to set the log sink - location "
"where we report the log to, thus it allows to easily redirect the "
"test logs to file or standard streams. By default testing log is "
"directed to standard output."
));
- log_sink.add_cla_id( "--", LOG_SINK, "=" );
+ log_sink.add_cla_id( "--", btrt_log_sink, "=" );
log_sink.add_cla_id( "-", "k", " " );
store.add( log_sink );
///////////////////////////////////////////////
- rt::enum_parameter<unit_test::output_format> output_format( OUTPUT_FORMAT, (
+ rt::enum_parameter<unit_test::output_format> output_format( btrt_output_format, (
rt::description = "Specifies output format (both log and report).",
rt::env_var = "BOOST_TEST_OUTPUT_FORMAT",
rt::enum_values<unit_test::output_format>::value =
@@ -380,8 +380,8 @@ register_parameters( rt::parameters_store& store )
( "XML", OF_XML )
,
#endif
- rt::help = "Parameter " + OUTPUT_FORMAT + " combines an effect of " + REPORT_FORMAT +
- " and " + LOG_FORMAT + " parameters. This parameter has higher priority "
+ rt::help = "Parameter " + btrt_output_format + " combines an effect of " + btrt_report_format +
+ " and " + btrt_log_format + " parameters. This parameter has higher priority "
"than either one of them. In other words if this parameter is specified "
"it overrides the value of other two parameters. This parameter does not "
"have a default value. The only acceptable values are string names of "
@@ -389,33 +389,33 @@ register_parameters( rt::parameters_store& store )
"automation tools processing."
));
- output_format.add_cla_id( "--", OUTPUT_FORMAT, "=" );
+ output_format.add_cla_id( "--", btrt_output_format, "=" );
output_format.add_cla_id( "-", "o", " " );
store.add( output_format );
/////////////////////////////////////////////// combined logger option
- rt::parameter<std::string,rt::REPEATABLE_PARAM> combined_logger( COMBINED_LOGGER, (
+ rt::parameter<std::string,rt::REPEATABLE_PARAM> combined_logger( btrt_combined_logger, (
rt::description = "Specifies log level and sink for one or several log format",
rt::env_var = "BOOST_TEST_LOGGER",
rt::value_hint = "log_format:log_level:log_sink",
- rt::help = "Parameter " + COMBINED_LOGGER + " allows to specify the logger type, level and sink\n"
+ rt::help = "Parameter " + btrt_combined_logger + " allows to specify the logger type, level and sink\n"
"in one command."
));
- combined_logger.add_cla_id( "--", COMBINED_LOGGER, "=" );
+ combined_logger.add_cla_id( "--", btrt_combined_logger, "=" );
store.add( combined_logger );
///////////////////////////////////////////////
- rt::parameter<unsigned> random_seed( RANDOM_SEED, (
+ rt::parameter<unsigned> random_seed( btrt_random_seed, (
rt::description = "Allows to switch between sequential and random order of test units execution."
" Optionally allows to specify concrete seed for random number generator.",
rt::env_var = "BOOST_TEST_RANDOM",
rt::default_value = 0U,
rt::optional_value = 1U,
rt::value_hint = "<seed>",
- rt::help = "Parameter " + RANDOM_SEED + " instructs the framework to execute the "
+ rt::help = "Parameter " + btrt_random_seed + " instructs the framework to execute the "
"test cases in random order. This parameter accepts optional unsigned "
"integer argument. By default test cases are executed in some specific "
"order defined by order of test units in test files and dependency between "
@@ -425,12 +425,12 @@ register_parameters( rt::parameters_store& store )
"the run."
));
- random_seed.add_cla_id( "--", RANDOM_SEED, "=" );
+ random_seed.add_cla_id( "--", btrt_random_seed, "=" );
store.add( random_seed );
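Once the arguments are finalized, the seed is read back through the same runtime_config::get<> accessor that the rest of this patch switches over to the btrt_* constants. A minimal sketch, assuming the constants are visible through boost/test/unit_test_parameters.hpp as they are elsewhere in this changeset:

    namespace rc = boost::unit_test::runtime_config;
    // 0 (the default) keeps the sequential order; any other value is the seed in effect.
    unsigned seed = rc::get<unsigned>( rc::btrt_random_seed );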
///////////////////////////////////////////////
- rt::enum_parameter<unit_test::output_format> report_format( REPORT_FORMAT, (
+ rt::enum_parameter<unit_test::output_format> report_format( btrt_report_format, (
rt::description = "Specifies report format.",
rt::env_var = "BOOST_TEST_REPORT_FORMAT",
rt::default_value = OF_CLF,
@@ -448,7 +448,7 @@ register_parameters( rt::parameters_store& store )
( "XML", OF_XML )
,
#endif
- rt::help = "Parameter " + REPORT_FORMAT + " allows to set the framework's report format "
+ rt::help = "Parameter " + btrt_report_format + " allows to set the framework's report format "
"to one of the formats supplied by the framework. The only acceptable values "
"for this parameter are the names of the output formats. By default the framework "
"uses human readable format (HRF) for results reporting. Alternatively you can "
@@ -456,13 +456,13 @@ register_parameters( rt::parameters_store& store )
"automation tools."
));
- report_format.add_cla_id( "--", REPORT_FORMAT, "=" );
+ report_format.add_cla_id( "--", btrt_report_format, "=" );
report_format.add_cla_id( "-", "m", " " );
store.add( report_format );
///////////////////////////////////////////////
- rt::enum_parameter<unit_test::report_level> report_level( REPORT_LEVEL, (
+ rt::enum_parameter<unit_test::report_level> report_level( btrt_report_level, (
rt::description = "Specifies report level.",
rt::env_var = "BOOST_TEST_REPORT_LEVEL",
rt::default_value = CONFIRMATION_REPORT,
@@ -482,155 +482,155 @@ register_parameters( rt::parameters_store& store )
( "no", NO_REPORT )
,
#endif
- rt::help = "Parameter " + REPORT_LEVEL + " allows to set the verbosity level of the "
+ rt::help = "Parameter " + btrt_report_level + " allows to set the verbosity level of the "
"testing result report generated by the framework. Use value 'no' to "
"eliminate the results report completely."
));
- report_level.add_cla_id( "--", REPORT_LEVEL, "=" );
+ report_level.add_cla_id( "--", btrt_report_level, "=" );
report_level.add_cla_id( "-", "r", " " );
store.add( report_level );
///////////////////////////////////////////////
- rt::parameter<std::string> report_mem_leaks( REPORT_MEM_LEAKS, (
+ rt::parameter<std::string> report_mem_leaks( btrt_report_mem_leaks, (
rt::description = "File where to report memory leaks to.",
rt::env_var = "BOOST_TEST_REPORT_MEMORY_LEAKS_TO",
rt::default_value = std::string(),
rt::value_hint = "<file name>",
- rt::help = "Parameter " + REPORT_MEM_LEAKS + " allows to specify a file where to report "
+ rt::help = "Parameter " + btrt_report_mem_leaks + " allows to specify a file where to report "
"memory leaks to. The parameter does not have default value. If it is not specified, "
"memory leaks (if any) are reported to the standard error stream."
));
- report_mem_leaks.add_cla_id( "--", REPORT_MEM_LEAKS, "=" );
+ report_mem_leaks.add_cla_id( "--", btrt_report_mem_leaks, "=" );
store.add( report_mem_leaks );
///////////////////////////////////////////////
- rt::parameter<std::string> report_sink( REPORT_SINK, (
+ rt::parameter<std::string> report_sink( btrt_report_sink, (
rt::description = "Specifies report sink: stderr(default), stdout or file name.",
rt::env_var = "BOOST_TEST_REPORT_SINK",
rt::value_hint = "<stderr|stdout|file name>",
- rt::help = "Parameter " + REPORT_SINK + " allows to set the result report sink - "
+ rt::help = "Parameter " + btrt_report_sink + " allows to set the result report sink - "
"the location where the framework writes the result report to, thus it "
"allows to easily redirect the result report to a file or a standard "
"stream. By default the testing result report is directed to the "
"standard error stream."
));
- report_sink.add_cla_id( "--", REPORT_SINK, "=" );
+ report_sink.add_cla_id( "--", btrt_report_sink, "=" );
report_sink.add_cla_id( "-", "e", " " );
store.add( report_sink );
///////////////////////////////////////////////
- rt::option result_code( RESULT_CODE, (
+ rt::option result_code( btrt_result_code, (
rt::description = "Disables test modules's result code generation.",
rt::env_var = "BOOST_TEST_RESULT_CODE",
rt::default_value = true,
- rt::help = "The 'no' argument value for the parameter " + RESULT_CODE + " instructs the "
+ rt::help = "The 'no' argument value for the parameter " + btrt_result_code + " instructs the "
"framework to always return zero result code. This can be used for test programs "
"executed within IDE. By default this parameter has value 'yes'."
));
- result_code.add_cla_id( "--", RESULT_CODE, "=", true );
+ result_code.add_cla_id( "--", btrt_result_code, "=", true );
result_code.add_cla_id( "-", "c", " " );
store.add( result_code );
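The value of this option is meant to be consulted where the module computes its exit status. A sketch of the query, under the same header assumption as above:

    namespace rc = boost::unit_test::runtime_config;
    bool propagate_result_code = rc::get<bool>( rc::btrt_result_code );
    // false (the 'no' argument value described in the help text) means the
    // module always returns a zero result code.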
///////////////////////////////////////////////
- rt::parameter<std::string,rt::REPEATABLE_PARAM> tests_to_run( RUN_FILTERS, (
+ rt::parameter<std::string,rt::REPEATABLE_PARAM> tests_to_run( btrt_run_filters, (
rt::description = "Filters, which test units to include or exclude from test module execution.",
rt::env_var = "BOOST_TEST_RUN_FILTERS",
rt::value_hint = "<test unit filter>",
- rt::help = "Parameter " + RUN_FILTERS + " allows to filter which test units to execute during "
+ rt::help = "Parameter " + btrt_run_filters + " allows to filter which test units to execute during "
"testing. The framework supports both 'selection filters', which allow to select "
"which test units to enable from the set of available test units, and 'disabler "
"filters', which allow to disable some test units. The __UTF__ also supports "
"enabling/disabling test units at compile time. These settings identify the default "
- "set of test units to run. Parameter " + RUN_FILTERS + " is used to change this default. "
+ "set of test units to run. Parameter " + btrt_run_filters + " is used to change this default. "
"This parameter is repeatable, so you can specify more than one filter if necessary."
));
- tests_to_run.add_cla_id( "--", RUN_FILTERS, "=" );
+ tests_to_run.add_cla_id( "--", btrt_run_filters, "=" );
tests_to_run.add_cla_id( "-", "t", " " );
store.add( tests_to_run );
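The help text refers to enabling/disabling test units at compile time as the default that these run filters then override. A minimal sketch of that compile-time side, assuming a test module that includes boost/test/unit_test.hpp and uses the decorator API:

    #include <boost/test/unit_test.hpp>
    namespace utf = boost::unit_test;

    // Excluded from the default run at compile time; a run filter supplied with
    // -t (the short form registered above) changes that default at run time.
    BOOST_AUTO_TEST_CASE( slow_io_check, * utf::disabled() )
    {
        BOOST_TEST( true );
    }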
///////////////////////////////////////////////
- rt::option save_test_pattern( SAVE_TEST_PATTERN, (
+ rt::option save_test_pattern( btrt_save_test_pattern, (
rt::description = "Allows to switch between saving or matching test pattern file.",
rt::env_var = "BOOST_TEST_SAVE_PATTERN",
- rt::help = "Parameter " + SAVE_TEST_PATTERN + " facilitates switching mode of operation for "
+ rt::help = "Parameter " + btrt_save_test_pattern + " facilitates switching mode of operation for "
"testing output streams.\n\nThis parameter serves no particular purpose within the "
"framework itself. It can be used by test modules relying on output_test_stream to "
"implement testing logic. Default mode is 'match' (false)."
));
- save_test_pattern.add_cla_id( "--", SAVE_TEST_PATTERN, "=" );
+ save_test_pattern.add_cla_id( "--", btrt_save_test_pattern, "=" );
store.add( save_test_pattern );
///////////////////////////////////////////////
- rt::option show_progress( SHOW_PROGRESS, (
+ rt::option show_progress( btrt_show_progress, (
rt::description = "Turns on progress display.",
rt::env_var = "BOOST_TEST_SHOW_PROGRESS",
- rt::help = "Parameter " + SHOW_PROGRESS + " instructs the framework to display test progress "
+ rt::help = "Parameter " + btrt_show_progress + " instructs the framework to display test progress "
"information. By default the test progress is not shown."
));
- show_progress.add_cla_id( "--", SHOW_PROGRESS, "=" );
+ show_progress.add_cla_id( "--", btrt_show_progress, "=" );
show_progress.add_cla_id( "-", "p", " " );
store.add( show_progress );
///////////////////////////////////////////////
- rt::option use_alt_stack( USE_ALT_STACK, (
+ rt::option use_alt_stack( btrt_use_alt_stack, (
rt::description = "Turns on/off usage of an alternative stack for signal handling.",
rt::env_var = "BOOST_TEST_USE_ALT_STACK",
rt::default_value = true,
- rt::help = "Parameter " + USE_ALT_STACK + " instructs the framework to use alternative "
+ rt::help = "Parameter " + btrt_use_alt_stack + " instructs the framework to use alternative "
"stack for signals processing, on platforms where they are supported. The feature "
"is enabled by default, but can be disabled using this parameter."
));
- use_alt_stack.add_cla_id( "--", USE_ALT_STACK, "=", true );
+ use_alt_stack.add_cla_id( "--", btrt_use_alt_stack, "=", true );
store.add( use_alt_stack );
///////////////////////////////////////////////
- rt::option wait_for_debugger( WAIT_FOR_DEBUGGER, (
+ rt::option wait_for_debugger( btrt_wait_for_debugger, (
rt::description = "Forces test module to wait for button to be pressed before starting test run.",
rt::env_var = "BOOST_TEST_WAIT_FOR_DEBUGGER",
- rt::help = "Parameter " + WAIT_FOR_DEBUGGER + " instructs the framework to pause before starting "
+ rt::help = "Parameter " + btrt_wait_for_debugger + " instructs the framework to pause before starting "
"test units execution, so that you can attach a debugger to running test module. By "
"default this parameters turned off."
));
- wait_for_debugger.add_cla_id( "--", WAIT_FOR_DEBUGGER, "=" );
+ wait_for_debugger.add_cla_id( "--", btrt_wait_for_debugger, "=" );
wait_for_debugger.add_cla_id( "-", "w", " " );
store.add( wait_for_debugger );
///////////////////////////////////////////////
- rt::parameter<std::string> help( HELP, (
+ rt::parameter<std::string> help( btrt_help, (
rt::description = "Help for framework parameters.",
rt::optional_value = std::string(),
rt::value_hint = "<parameter name>",
- rt::help = "Parameter " + HELP + " displays help on the framework's parameters. "
+ rt::help = "Parameter " + btrt_help + " displays help on the framework's parameters. "
"The parameter accepts an optional argument value. If present, an argument value is "
"interpreted as a parameter name (name guessing works as well, so for example "
"--help=rand displays help on the parameter random). If the parameter name is unknown "
"or ambiguous error is reported. If argument value is absent, a summary of all "
"framework's parameter is displayed."
));
- help.add_cla_id( "--", HELP, "=" );
+ help.add_cla_id( "--", btrt_help, "=" );
store.add( help );
///////////////////////////////////////////////
- rt::option usage( USAGE, (
+ rt::option usage( btrt_usage, (
rt::description = "Short message explaining usage of Boost.Test parameters."
));
usage.add_cla_id( "-", "?", " " );
@@ -638,10 +638,10 @@ register_parameters( rt::parameters_store& store )
///////////////////////////////////////////////
- rt::option version( VERSION, (
+ rt::option version( btrt_version, (
rt::description = "Prints Boost.Test version and exits."
));
- version.add_cla_id( "--", VERSION, " " );
+ version.add_cla_id( "--", btrt_version, " " );
store.add( version );
}
@@ -676,24 +676,24 @@ init( int& argc, char** argv )
rt::finalize_arguments( s_parameters_store, s_arguments_store );
// Report help if requested
- if( runtime_config::get<bool>( VERSION ) ) {
+ if( runtime_config::get<bool>( btrt_version ) ) {
parser->version( std::cerr );
BOOST_TEST_I_THROW( framework::nothing_to_test( boost::exit_success ) );
}
- else if( runtime_config::get<bool>( USAGE ) ) {
+ else if( runtime_config::get<bool>( btrt_usage ) ) {
parser->usage( std::cerr );
BOOST_TEST_I_THROW( framework::nothing_to_test( boost::exit_success ) );
}
- else if( s_arguments_store.has( HELP ) ) {
- parser->help( std::cerr, s_parameters_store, runtime_config::get<std::string>( HELP ) );
+ else if( s_arguments_store.has( btrt_help ) ) {
+ parser->help( std::cerr, s_parameters_store, runtime_config::get<std::string>( btrt_help ) );
BOOST_TEST_I_THROW( framework::nothing_to_test( boost::exit_success ) );
}
// A bit of business logic: output_format takes precedence over log/report formats
- if( s_arguments_store.has( OUTPUT_FORMAT ) ) {
- unit_test::output_format of = s_arguments_store.get<unit_test::output_format>( OUTPUT_FORMAT );
- s_arguments_store.set( REPORT_FORMAT, of );
- s_arguments_store.set( LOG_FORMAT, of );
+ if( s_arguments_store.has( btrt_output_format ) ) {
+ unit_test::output_format of = s_arguments_store.get<unit_test::output_format>( btrt_output_format );
+ s_arguments_store.set( btrt_report_format, of );
+ s_arguments_store.set( btrt_log_format, of );
}
}
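The business rule kept by this hunk means that, after initialization, the two subordinate format arguments carry the combined value; reading them back uses exactly the accessors seen above:

    // After init(), if -o XML (the short form registered for btrt_output_format) was
    // given, both queries yield OF_XML regardless of the individual log/report settings.
    unit_test::output_format lf = s_arguments_store.get<unit_test::output_format>( btrt_log_format );
    unit_test::output_format rf = s_arguments_store.get<unit_test::output_format>( btrt_report_format );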
@@ -747,7 +747,7 @@ argument_store()
bool
save_pattern()
{
- return runtime_config::get<bool>( SAVE_TEST_PATTERN );
+ return runtime_config::get<bool>( btrt_save_test_pattern );
}
//____________________________________________________________________________//
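save_pattern() is the accessor that output_test_stream-based tests are expected to consult, per the save_test_pattern help text above. A minimal usage sketch, assuming the header lives at boost/test/tools/output_test_stream.hpp (older releases ship it as boost/test/output_test_stream.hpp):

    #include <boost/test/tools/output_test_stream.hpp>
    #include <boost/test/unit_test_parameters.hpp>

    // match_or_save: true  = compare output against the pattern file,
    //                false = (re)generate the pattern file.
    boost::test_tools::output_test_stream output(
        "expected_output.pattern",                            // illustrative file name
        !boost::unit_test::runtime_config::save_pattern() );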