194 | 194 | our $opt_debug_server;
195 | 195 | our @opt_cases; # The test cases names in argv
196 | 196 | our $opt_embedded_server;
    | 197 | +# -1 indicates use default, override with env.var.
    | 198 | +my $opt_ctest= env_or_val(MTR_UNIT_TESTS => -1);
    | 199 | +# Unit test report stored here for delayed printing
    | 200 | +my $ctest_report;
197 | 201 |
198 | 202 | # Options used when connecting to an already running server
199 | 203 | my %opts_extern;
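The env_or_val() call above relies on a helper the script already provides, so the MTR_UNIT_TESTS environment variable can override the built-in -1 "auto" default before the command line is even parsed. A minimal sketch of how such a helper could behave; the name env_or_default and the numeric check are assumptions for illustration, not the real implementation:

  # Hypothetical stand-in for env_or_val(); the real helper in
  # mysql-test-run.pl may differ in details.
  sub env_or_default {
    my ($name, $default)= @_;
    my $val= $ENV{$name};
    # Prefer the environment value when it is set and looks numeric,
    # otherwise fall back to the caller-supplied default.
    return (defined $val && $val =~ /^-?\d+$/) ? $val : $default;
  }

  # The fat comma auto-quotes the bareword, so the call site reads cleanly:
  my $ctest_default= env_or_default(MTR_UNIT_TESTS => -1);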
@@ -493,13 +497,19 @@ sub main {
493 | 497 |     mtr_error("Not all tests completed");
494 | 498 |   }
495 | 499 |
    | 500 | +  mark_time_used('init');
    | 501 | +
    | 502 | +  push @$completed, run_ctest() if $opt_ctest;
    | 503 | +
496 | 504 |   mtr_print_line();
497 | 505 |
498 | 506 |   if ( $opt_gcov ) {
499 | 507 |     gcov_collect($basedir, $opt_gcov_exe,
500 | 508 |                  $opt_gcov_msg, $opt_gcov_err);
501 | 509 |   }
502 | 510 |
    | 511 | +  print "$ctest_report\n" if $ctest_report;
    | 512 | +
503 | 513 |   print_total_times($opt_parallel) if $opt_report_times;
504 | 514 |
505 | 515 |   mtr_report_stats("Completed", $completed);
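A note on the push above: run_ctest() (added near the end of this diff) ends with a bare return when it decides to skip, which evaluates to an empty list in list context, so the push adds nothing at all rather than an undef entry. A small self-contained illustration of that idiom, using made-up names:

  sub maybe_result {
    my ($skip)= @_;
    return if $skip;                  # bare return: empty list in list context
    return { name => 'unit_tests' };  # otherwise a single test record
  }

  my @completed= ('t1', 't2');
  push @completed, maybe_result(1);   # skipped: @completed is unchanged
  push @completed, maybe_result(0);   # adds the hashref as a third element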
@@ -1055,6 +1065,7 @@ sub command_line_setup {
1055 | 1065 | 'max-connections=i' => \$opt_max_connections,
1056 | 1066 | 'default-myisam!' => \&collect_option,
1057 | 1067 | 'report-times' => \$opt_report_times,
     | 1068 | + 'unit-tests!' => \$opt_ctest,
1058 | 1069 |
1059 | 1070 | 'help|h' => \$opt_usage,
1060 | 1071 | # list-options is internal, not listed in help
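The trailing '!' in the 'unit-tests!' specification is what makes Getopt::Long accept both --unit-tests and --nounit-tests, matching the help text added later in this change. A minimal standalone sketch of that behaviour (the variable name is chosen for the example):

  use strict;
  use warnings;
  use Getopt::Long;

  my $unit_tests= -1;                 # -1 means "decide automatically later"
  GetOptions('unit-tests!' => \$unit_tests) or die "Invalid options";

  # --unit-tests    sets $unit_tests to 1
  # --nounit-tests  sets $unit_tests to 0
  # giving neither option leaves the -1 auto default untouched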
@@ -1484,6 +1495,14 @@ sub command_line_setup {
1484 | 1495 | if $opt_suites || @opt_cases;
1485 | 1496 | }
1486 | 1497 |
     | 1498 | + # --------------------------------------------------------------------------
     | 1499 | + # Don't run ctest if tests or suites named
     | 1500 | + # --------------------------------------------------------------------------
     | 1501 | +
     | 1502 | + $opt_ctest= 0 if $opt_ctest == -1 && ($opt_suites || @opt_cases);
     | 1503 | + # Override: disable if running in the PB test environment
     | 1504 | + $opt_ctest= 0 if $opt_ctest == -1 && defined $ENV{PB2WORKDIR};
     | 1505 | +
1487 | 1506 | # --------------------------------------------------------------------------
1488 | 1507 | # Check use of wait-all
1489 | 1508 | # --------------------------------------------------------------------------
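With the option handling and the two overrides above, $opt_ctest ends up as a small tri-state switch. A sketch of the resulting decision, written as a standalone function for clarity; the script itself keeps this inline, and the remaining -1 case can still be downgraded inside run_ctest() if ctest turns out to be unavailable:

  sub resolve_ctest {
    my ($opt, $named_tests, $in_pushbuild)= @_;
    return $opt if $opt != -1;     # explicit --unit-tests / --nounit-tests wins
    return 0 if $named_tests       # auto mode: skip when tests/suites were named
             or $in_pushbuild;     # ... or when PB2WORKDIR says we are in pushbuild
    return -1;                     # still "auto": try ctest, tolerate it being absent
  }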
@@ -5653,6 +5672,73 @@ ()
5653 | 5672 |   return $found_err;
5654 | 5673 | }
5655 | 5674 |
     | 5675 | +sub run_ctest() {
     | 5676 | +  my $olddir= getcwd();
     | 5677 | +  chdir ($bindir) or die ("Could not chdir to $bindir");
     | 5678 | +  my $tinfo;
     | 5679 | +  my $no_ctest= (IS_WINDOWS) ? 256 : -1;
     | 5680 | +
     | 5681 | +  # Just ignore if not configured/built to run ctest
     | 5682 | +  if (! -f "CTestTestfile.cmake") {
     | 5683 | +    chdir($olddir);
     | 5684 | +    return;
     | 5685 | +  }
     | 5686 | +
     | 5687 | +  # Also silently ignore if we don't have ctest and didn't insist
     | 5688 | +  # Now, run ctest and collect output
     | 5689 | +  my $ctest_out= `ctest 2>&1`;
     | 5690 | +  if ($? == $no_ctest && $opt_ctest == -1) {
     | 5691 | +    chdir($olddir);
     | 5692 | +    return;
     | 5693 | +  }
     | 5694 | +
     | 5695 | +  # Create minimalistic "test" for the reporting
     | 5696 | +  $tinfo = My::Test->new
     | 5697 | +    (
     | 5698 | +      name => 'unit_tests',
     | 5699 | +    );
     | 5700 | +  # Set dummy worker id to align report with normal tests
     | 5701 | +  $tinfo->{worker} = 0 if $opt_parallel > 1;
     | 5702 | +
     | 5703 | +  my $ctfail= 0;    # Did ctest fail?
     | 5704 | +  if ($?) {
     | 5705 | +    $ctfail= 1;
     | 5706 | +    $tinfo->{result}= 'MTR_RES_FAILED';
     | 5707 | +    $tinfo->{comment}= "ctest failed with exit code $?, see result below";
     | 5708 | +    $ctest_out= "" unless $ctest_out;
     | 5709 | +  }
     | 5710 | +  my $ctfile= "$opt_vardir/ctest.log";
     | 5711 | +  my $ctres= 0;     # Did ctest produce report summary?
     | 5712 | +
     | 5713 | +  open (CTEST, " > $ctfile") or die ("Could not open output file $ctfile");
     | 5714 | +
     | 5715 | +  # Put ctest output in log file, while analyzing results
     | 5716 | +  for (split ('\n', $ctest_out)) {
     | 5717 | +    print CTEST "$_\n";
     | 5718 | +    if (/tests passed/) {
     | 5719 | +      $ctres= 1;
     | 5720 | +      $ctest_report .= "\nUnit tests: $_\n";
     | 5721 | +    }
     | 5722 | +    if ( /FAILED/ or /\(Failed\)/ ) {
     | 5723 | +      $ctfail= 1;
     | 5724 | +      $ctest_report .= " $_\n";
     | 5725 | +    }
     | 5726 | +  }
     | 5727 | +  close CTEST;
     | 5728 | +
     | 5729 | +  # Set needed 'attributes' for test reporting
     | 5730 | +  $tinfo->{comment}.= "\nctest did not produce report summary" if ! $ctres;
     | 5731 | +  $tinfo->{result}= ($ctres && !$ctfail)
     | 5732 | +    ? 'MTR_RES_PASSED' : 'MTR_RES_FAILED';
     | 5733 | +  $ctest_report .= "Report from unit tests in $ctfile\n";
     | 5734 | +  $tinfo->{failures}= ($tinfo->{result} eq 'MTR_RES_FAILED');
     | 5735 | +
     | 5736 | +  mark_time_used('test');
     | 5737 | +  mtr_report_test($tinfo);
     | 5738 | +  chdir($olddir);
     | 5739 | +  return $tinfo;
     | 5740 | +}
     | 5741 | +
5656 | 5742 | #
5657 | 5743 | # Usage
5658 | 5744 | #
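The parsing loop in run_ctest() keys on the summary and failure lines that ctest prints; the exact wording can vary between CMake versions, so the sample below is illustrative rather than authoritative. (On the non-Windows side, the $no_ctest value of -1 matches Perl's documented convention of setting $? to -1 when a backticked command could not be started at all.)

  # Lines of the kind the loop looks for (sampled, not exact):
  my @sample= (
    '97% tests passed, 1 tests failed out of 38',
    'The following tests FAILED:',
    '         12 - pfs_instr-t (Failed)',
  );

  for (@sample) {
    print "summary: $_\n" if /tests passed/;
    print "failure: $_\n" if /FAILED/ or /\(Failed\)/;
  }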
@@ -5871,6 +5957,9 @@ ($)
5871 | 5957 |                         engine to InnoDB.
5872 | 5958 |   report-times          Report how much time has been spent on different
5873 | 5959 |                         phases of test execution.
     | 5960 | +  nounit-tests          Do not run unit tests. Normally run if configured
     | 5961 | +                        and if not running named tests/suites.
     | 5962 | +  unit-tests            Run unit tests even if they would otherwise not be run.
5874 | 5963 |
5875 | 5964 | Some options that control enabling a feature for normal test runs
5876 | 5965 | can be turned off by prepending 'no' to the option, e.g. --notimer.