# Enable the tests

find_package(Threads REQUIRED)

# NOTE: Some tests use `<cassert>` to perform the test. Therefore we must
# strip -DNDEBUG from the default CMake flags in non-Debug configurations,
# where CMake would otherwise define it and disable the assertions.
string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
  add_definitions( -UNDEBUG )
  add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
  # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
  foreach (flags_var_to_scrub
      CMAKE_CXX_FLAGS_RELEASE
      CMAKE_CXX_FLAGS_RELWITHDEBINFO
      CMAKE_CXX_FLAGS_MINSIZEREL
      CMAKE_C_FLAGS_RELEASE
      CMAKE_C_FLAGS_RELWITHDEBINFO
      CMAKE_C_FLAGS_MINSIZEREL)
    string(REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
           "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
  endforeach()
endif()
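
# For illustration: with MSVC the default CMAKE_CXX_FLAGS_RELEASE is
# typically "/MD /O2 /Ob2 /DNDEBUG"; after the scrub above it becomes
# "/MD /O2 /Ob2 ", so assert() stays active in the test binaries.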

# NOTE: These flags must be added after find_package(Threads REQUIRED) otherwise
# they will break the configuration check.
if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
  list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
endif()

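# Helper library shared by the *_output_test binaries; output_test.h and
# output_test_helper.cc hold the output-checking machinery those tests use.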
add_library(output_test_helper STATIC output_test_helper.cc output_test.h)

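# compile_benchmark_test(<name>) builds <name> from <name>.cc and links it
# against the benchmark library and the platform thread library.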
macro(compile_benchmark_test name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_benchmark_test)

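# compile_output_test(<name>) does the same, but additionally links the
# output_test_helper library so the test can verify reporter output.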
macro(compile_output_test name)
  add_executable(${name} "${name}.cc" output_test.h)
  target_link_libraries(${name} output_test_helper benchmark
                        ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_output_test)

# Demonstration executable
compile_benchmark_test(benchmark_test)
add_test(benchmark benchmark_test --benchmark_min_time=0.01)
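
# Note: every test below passes --benchmark_min_time=0.01 so each benchmark
# runs only briefly; the library's default minimum time is far larger and
# would make the suite noticeably slower.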

compile_benchmark_test(filter_test)
macro(add_filter_test name filter expect)
  add_test(${name} filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
  add_test(${name}_list_only filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
endmacro(add_filter_test)
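
# The trailing <expect> argument is the number of benchmarks the filter
# should match; filter_test checks it both for a normal run and for a
# --benchmark_list_tests run.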

add_filter_test(filter_simple "Foo" 3)
add_filter_test(filter_suffix "BM_.*" 4)
add_filter_test(filter_regex_all ".*" 5)
add_filter_test(filter_regex_blank "" 5)
add_filter_test(filter_regex_none "monkey" 0)
add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
add_filter_test(filter_regex_begin "^BM_.*" 4)
add_filter_test(filter_regex_begin2 "^N" 1)
add_filter_test(filter_regex_end ".*Ba$" 1)

compile_benchmark_test(options_test)
add_test(options_benchmarks options_test --benchmark_min_time=0.01)

compile_benchmark_test(basic_test)
add_test(basic_benchmark basic_test --benchmark_min_time=0.01)

compile_benchmark_test(diagnostics_test)
add_test(diagnostics_test diagnostics_test --benchmark_min_time=0.01)

compile_benchmark_test(skip_with_error_test)
add_test(skip_with_error_test skip_with_error_test --benchmark_min_time=0.01)

compile_benchmark_test(donotoptimize_test)
add_test(donotoptimize_test donotoptimize_test --benchmark_min_time=0.01)

compile_benchmark_test(fixture_test)
add_test(fixture_test fixture_test --benchmark_min_time=0.01)

compile_benchmark_test(register_benchmark_test)
add_test(register_benchmark_test register_benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(map_test)
add_test(map_test map_test --benchmark_min_time=0.01)

compile_benchmark_test(multiple_ranges_test)
add_test(multiple_ranges_test multiple_ranges_test --benchmark_min_time=0.01)

compile_output_test(reporter_output_test)
add_test(reporter_output_test reporter_output_test --benchmark_min_time=0.01)

compile_output_test(user_counters_test)
add_test(user_counters_test user_counters_test --benchmark_min_time=0.01)

compile_output_test(user_counters_tabular_test)
add_test(user_counters_tabular_test user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01)

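# If the compiler accepts -std=c++03, rebuild one test in C++03 mode as well;
# this presumably guards against the public headers silently requiring C++11.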
include(CheckCXXCompilerFlag)
check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
if (BENCHMARK_HAS_CXX03_FLAG)
  set(CXX03_FLAGS "${CMAKE_CXX_FLAGS}")
  string(REPLACE "-std=c++11" "-std=c++03" CXX03_FLAGS "${CXX03_FLAGS}")
  string(REPLACE "-std=c++0x" "-std=c++03" CXX03_FLAGS "${CXX03_FLAGS}")

  compile_benchmark_test(cxx03_test)
  set_target_properties(cxx03_test
      PROPERTIES COMPILE_FLAGS "${CXX03_FLAGS}")
  add_test(cxx03 cxx03_test --benchmark_min_time=0.01)
endif()

# Attempt to work around flaky test failures when running on Appveyor servers.
if (DEFINED ENV{APPVEYOR})
  set(COMPLEXITY_MIN_TIME "0.5")
else()
  set(COMPLEXITY_MIN_TIME "0.01")
endif()
compile_output_test(complexity_test)
add_test(complexity_benchmark complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})

# Add the coverage command(s)
if(CMAKE_BUILD_TYPE)
  string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
endif()
# Reference the variable by name (not ${...}) so the if() still parses when
# CMAKE_BUILD_TYPE_LOWER is unset.
if (CMAKE_BUILD_TYPE_LOWER MATCHES "coverage")
  find_program(GCOV gcov)
  find_program(LCOV lcov)
  find_program(GENHTML genhtml)
  find_program(CTEST ctest)
  if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
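    # The command below follows the usual lcov workflow: zero the counters,
    # capture a baseline, run the tests via ctest, capture the post-run data,
    # merge the two captures, drop the test/ sources from the result, and
    # render an HTML report with genhtml.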
    add_custom_command(
      OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMAND ${LCOV} -q -z -d .
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
      COMMAND ${CTEST} --force-new-ctest-process
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
      COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
      COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
      COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
      DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
      WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
      COMMENT "Running LCOV"
    )
    add_custom_target(coverage
      DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMENT "LCOV report at lcov/index.html"
    )
    message(STATUS "Coverage command added")
  else()
    if (HAVE_CXX_FLAG_COVERAGE)
      set(CXX_FLAG_COVERAGE_MESSAGE supported)
    else()
      set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
    endif()
    message(WARNING
      "Coverage not available:\n"
      "  gcov: ${GCOV}\n"
      "  lcov: ${LCOV}\n"
      "  genhtml: ${GENHTML}\n"
      "  ctest: ${CTEST}\n"
      "  --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
  endif()
endif()