Diffstat (limited to 'core/scripts/run-tests.sh')
-rwxr-xr-x  core/scripts/run-tests.sh  222
1 file changed, 146 insertions, 76 deletions
diff --git a/core/scripts/run-tests.sh b/core/scripts/run-tests.sh
index 4545a00cc3a..96f16eed9c7 100755
--- a/core/scripts/run-tests.sh
+++ b/core/scripts/run-tests.sh
@@ -17,14 +17,13 @@ use Drupal\Component\Utility\Timer;
use Drupal\Core\Composer\Composer;
use Drupal\Core\Database\Database;
use Drupal\Core\Test\EnvironmentCleaner;
+use Drupal\Core\Test\PhpUnitTestDiscovery;
use Drupal\Core\Test\PhpUnitTestRunner;
use Drupal\Core\Test\SimpletestTestRunResultsStorage;
-use Drupal\Core\Test\RunTests\TestFileParser;
use Drupal\Core\Test\TestDatabase;
use Drupal\Core\Test\TestRun;
use Drupal\Core\Test\TestRunnerKernel;
use Drupal\Core\Test\TestRunResultsStorageInterface;
-use Drupal\Core\Test\TestDiscovery;
use Drupal\BuildTests\Framework\BuildTestBase;
use Drupal\FunctionalJavascriptTests\WebDriverTestBase;
use Drupal\KernelTests\KernelTestBase;
@@ -49,6 +48,10 @@ const SIMPLETEST_SCRIPT_COLOR_EXCEPTION = 33;
const SIMPLETEST_SCRIPT_COLOR_YELLOW = 33;
// A refreshing cyan.
const SIMPLETEST_SCRIPT_COLOR_CYAN = 36;
+// A faint gray.
+const SIMPLETEST_SCRIPT_COLOR_GRAY = 90;
+// A notable white.
+const SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE = "1;97";
// Restricting the chunk of queries prevents memory exhaustion.
const SIMPLETEST_SCRIPT_SQLITE_VARIABLE_LIMIT = 350;
@@ -86,12 +89,12 @@ if ($args['list']) {
// Display all available tests organized by one @group annotation.
echo "\nAvailable test groups & classes\n";
echo "-------------------------------\n\n";
- $test_discovery = new TestDiscovery(
- \Drupal::root(),
- \Drupal::service('class_loader')
- );
+ $test_discovery = new PhpUnitTestDiscovery(\Drupal::root() . \DIRECTORY_SEPARATOR . 'core');
try {
$groups = $test_discovery->getTestClasses($args['module']);
+ foreach ($test_discovery->getWarnings() as $warning) {
+ simpletest_script_print($warning . "\n", SIMPLETEST_SCRIPT_COLOR_EXCEPTION);
+ }
}
catch (Exception $e) {
error_log((string) $e);
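
A minimal sketch of the shape getTestClasses() appears to return, inferred from how the rest of this script consumes it: test info arrays keyed by class name, grouped under group names. The class name and count below are hypothetical.

<?php

$groups = [
  'system' => [
    'Drupal\Tests\system\Kernel\FooTest' => [
      'name' => 'Drupal\Tests\system\Kernel\FooTest',
      'group' => 'system',
      'groups' => ['system'],
      'tests_count' => 12,
    ],
  ],
];
// The --list branch above walks this structure group by group.
foreach ($groups as $group => $tests) {
  echo $group . "\n";
  foreach (array_keys($tests) as $class) {
    echo ' - ' . $class . "\n";
  }
}
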
@@ -119,11 +122,8 @@ if ($args['list']) {
// @see https://www.drupal.org/node/2569585
if ($args['list-files'] || $args['list-files-json']) {
// List all files which could be run as tests.
- $test_discovery = new TestDiscovery(
- \Drupal::root(),
- \Drupal::service('class_loader')
- );
- // TestDiscovery::findAllClassFiles() gives us a classmap similar to a
+ $test_discovery = new PhpUnitTestDiscovery(\Drupal::root() . \DIRECTORY_SEPARATOR . 'core');
+ // PhpUnitTestDiscovery::findAllClassFiles() gives us a classmap similar to a
// Composer 'classmap' array.
$test_classes = $test_discovery->findAllClassFiles();
// JSON output is the easiest.
@@ -177,6 +177,15 @@ if (!Composer::upgradePHPUnitCheck(Version::id())) {
exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
}
+echo "\n";
+echo "Drupal test run\n\n";
+echo sprintf("Drupal Version: %s\n", \Drupal::VERSION);
+echo sprintf("PHP Version: %s\n", \PHP_VERSION);
+echo sprintf("PHP Binary: %s\n", $php ?? getenv('_'));
+echo sprintf("PHPUnit Version: %s\n", Version::id());
+echo "-------------------------------\n";
+echo "\n";
+
$test_list = simpletest_script_get_test_list();
// Try to allocate unlimited time to run the tests.
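
With this change the run banner is printed once, before test discovery, shaped roughly as follows (version numbers and binary path are illustrative):

Drupal test run

Drupal Version: 11.2.0-dev
PHP Version: 8.3.10
PHP Binary: /usr/bin/php
PHPUnit Version: 10.5.19
-------------------------------
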
@@ -355,6 +364,11 @@ All arguments are long options.
The index of the job in the job set.
+ --debug-discovery
+
+ If provided, dumps detailed information about the tests selected
+ for execution before the execution starts.
+
<test1>[,<test2>[,<test3> ...]]
One or more tests to be run. By default, these are interpreted
@@ -427,6 +441,7 @@ function simpletest_script_parse_args() {
'non-html' => FALSE,
'ci-parallel-node-index' => 1,
'ci-parallel-node-total' => 1,
+ 'debug-discovery' => FALSE,
];
// Override with set values.
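
For example, a hypothetical invocation combining the new flag with the existing --sqlite and --module options could look like:

php core/scripts/run-tests.sh --sqlite /tmp/test.sqlite --debug-discovery --module system
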
@@ -919,17 +934,15 @@ function simpletest_script_command(TestRun $test_run, string $test_class): array
function simpletest_script_get_test_list() {
global $args;
- $test_discovery = new TestDiscovery(
- \Drupal::root(),
- \Drupal::service('class_loader')
- );
- $types_processed = empty($args['types']);
+ $test_discovery = new PhpUnitTestDiscovery(\Drupal::root() . \DIRECTORY_SEPARATOR . 'core');
$test_list = [];
$slow_tests = [];
if ($args['all'] || $args['module'] || $args['directory']) {
try {
$groups = $test_discovery->getTestClasses($args['module'], $args['types'], $args['directory']);
- $types_processed = TRUE;
+ foreach ($test_discovery->getWarnings() as $warning) {
+ simpletest_script_print($warning . "\n", SIMPLETEST_SCRIPT_COLOR_EXCEPTION);
+ }
}
catch (Exception $e) {
echo (string) $e;
@@ -938,30 +951,34 @@ function simpletest_script_get_test_list() {
// Ensure that tests marked explicitly as @group #slow are run at the
// beginning of each job.
if (key($groups) === '#slow') {
- $slow_tests = array_keys(array_shift($groups));
+ $slow_tests = array_shift($groups);
}
$not_slow_tests = [];
foreach ($groups as $group => $tests) {
- $not_slow_tests = array_merge($not_slow_tests, array_keys($tests));
+ $not_slow_tests = array_merge($not_slow_tests, $tests);
}
// Filter slow tests out of the not slow tests and ensure a unique list
// since tests may appear in more than one group.
- $not_slow_tests = array_unique(array_diff($not_slow_tests, $slow_tests));
+ $not_slow_tests = array_diff_key($not_slow_tests, $slow_tests);
// If the tests are not being run in parallel, then ensure slow tests run
// all together first.
if ((int) $args['ci-parallel-node-total'] <= 1) {
sort_tests_by_type_and_methods($slow_tests);
sort_tests_by_type_and_methods($not_slow_tests);
- $test_list = array_merge($slow_tests, $not_slow_tests);
+ $all_tests_list = array_merge($slow_tests, $not_slow_tests);
+ assign_tests_sequence($all_tests_list);
+ dump_tests_sequence($all_tests_list, $args);
+ $test_list = array_keys($all_tests_list);
}
else {
- // Sort all tests by the number of public methods on the test class.
- // This is a proxy for the approximate time taken to run the test,
- // which is used in combination with @group #slow to start the slowest tests
- // first and distribute tests between test runners.
+ // Sort all tests by the number of test cases on the test class.
+ // This is used in combination with @group #slow to start the slowest
+ // tests first and distribute tests between test runners.
sort_tests_by_public_method_count($slow_tests);
sort_tests_by_public_method_count($not_slow_tests);
+ $all_tests_list = array_merge($slow_tests, $not_slow_tests);
+ assign_tests_sequence($all_tests_list);
// Now set up a bin per test runner.
$bin_count = (int) $args['ci-parallel-node-total'];
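
A standalone sketch of the keyed-list handling above, with hypothetical class names. Because discovery now keys tests by class name, the slow/fast split uses array_diff_key() and no longer needs a separate array_unique() pass.

<?php

$groups = [
  '#slow' => [
    'Drupal\Tests\a\SlowTest' => ['tests_count' => 40],
  ],
  'system' => [
    'Drupal\Tests\a\SlowTest' => ['tests_count' => 40],
    'Drupal\Tests\b\FastTest' => ['tests_count' => 3],
  ],
];
$slow_tests = key($groups) === '#slow' ? array_shift($groups) : [];
$not_slow_tests = [];
foreach ($groups as $tests) {
  $not_slow_tests = array_merge($not_slow_tests, $tests);
}
// Keys are class names, so one call de-duplicates and drops slow tests.
$not_slow_tests = array_diff_key($not_slow_tests, $slow_tests);
print_r(array_keys($not_slow_tests)); // Only FastTest remains.
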
@@ -975,6 +992,8 @@ function simpletest_script_get_test_list() {
$binned_other_tests = place_tests_into_bins($not_slow_tests, $bin_count);
$other_tests_for_job = $binned_other_tests[$args['ci-parallel-node-index'] - 1];
$test_list = array_merge($slow_tests_for_job, $other_tests_for_job);
+ dump_bin_tests_sequence($args['ci-parallel-node-index'], $all_tests_list, $test_list, $args);
+ $test_list = array_keys($test_list);
}
}
else {
@@ -988,6 +1007,9 @@ function simpletest_script_get_test_list() {
else {
try {
$groups = $test_discovery->getTestClasses(NULL, $args['types']);
+ foreach ($test_discovery->getWarnings() as $warning) {
+ simpletest_script_print($warning . "\n", SIMPLETEST_SCRIPT_COLOR_EXCEPTION);
+ }
}
catch (Exception $e) {
echo (string) $e;
@@ -1005,19 +1027,24 @@ function simpletest_script_get_test_list() {
}
elseif ($args['file']) {
// Extract test case class names from specified files.
- $parser = new TestFileParser();
foreach ($args['test_names'] as $file) {
- if (!file_exists($file)) {
+ if (!file_exists($file) || is_dir($file)) {
simpletest_script_print_error('File not found: ' . $file);
exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
}
- $test_list = array_merge($test_list, $parser->getTestListFromFile($file));
+ $fileTests = current($test_discovery->getTestClasses(NULL, [], $file));
+ $test_list = array_merge($test_list, $fileTests);
}
+ assign_tests_sequence($test_list);
+ dump_tests_sequence($test_list, $args);
+ $test_list = array_keys($test_list);
}
else {
try {
$groups = $test_discovery->getTestClasses(NULL, $args['types']);
- $types_processed = TRUE;
+ foreach ($test_discovery->getWarnings() as $warning) {
+ simpletest_script_print($warning . "\n", SIMPLETEST_SCRIPT_COLOR_EXCEPTION);
+ }
}
catch (Exception $e) {
echo (string) $e;
@@ -1034,22 +1061,15 @@ function simpletest_script_get_test_list() {
}
// Merge the tests from the groups together.
foreach ($args['test_names'] as $group_name) {
- $test_list = array_merge($test_list, array_keys($groups[$group_name]));
+ $test_list = array_merge($test_list, $groups[$group_name]);
}
+ assign_tests_sequence($test_list);
+ dump_tests_sequence($test_list, $args);
// Ensure our list of tests contains only one entry for each test.
- $test_list = array_unique($test_list);
+ $test_list = array_keys($test_list);
}
}
- // If the test list creation does not automatically limit by test type then
- // we need to do so here.
- if (!$types_processed) {
- $test_list = array_filter($test_list, function ($test_class) use ($args) {
- $test_info = TestDiscovery::getTestInfo($test_class);
- return in_array($test_info['type'], $args['types'], TRUE);
- });
- }
-
if (empty($test_list)) {
simpletest_script_print_error('No valid tests were specified.');
exit(SIMPLETEST_SCRIPT_EXIT_FAILURE);
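
A sketch of the final collapse from keyed info arrays to the plain class list the runner executes; the class names and counts are hypothetical.

<?php

$test_list = [
  'Drupal\Tests\a\FooTest' => ['name' => 'Drupal\Tests\a\FooTest', 'tests_count' => 10],
  'Drupal\Tests\b\BarTest' => ['name' => 'Drupal\Tests\b\BarTest', 'tests_count' => 2],
];
// assign_tests_sequence() numbers each entry in execution order...
$i = 0;
foreach ($test_list as &$testInfo) {
  $testInfo['sequence'] = ++$i;
}
unset($testInfo);
// ...and array_keys() then yields the class names to run.
print_r(array_keys($test_list));
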
@@ -1062,11 +1082,11 @@ function simpletest_script_get_test_list() {
* Sort tests by test type and number of public methods.
*/
function sort_tests_by_type_and_methods(array &$tests): void {
- usort($tests, function ($a, $b) {
- if (get_test_type_weight($a) === get_test_type_weight($b)) {
- return get_test_class_method_count($b) <=> get_test_class_method_count($a);
+ uasort($tests, function ($a, $b) {
+ if (get_test_type_weight($a['name']) === get_test_type_weight($b['name'])) {
+ return $b['tests_count'] <=> $a['tests_count'];
}
- return get_test_type_weight($b) <=> get_test_type_weight($a);
+ return get_test_type_weight($b['name']) <=> get_test_type_weight($a['name']);
});
}
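
The switch from usort() to uasort() matters here: the sort must preserve the class-name keys because the script later calls array_keys() on the sorted list. A small sketch with hypothetical data:

<?php

$tests = [
  'Drupal\Tests\b\SmallTest' => ['name' => 'Drupal\Tests\b\SmallTest', 'tests_count' => 3],
  'Drupal\Tests\a\BigTest' => ['name' => 'Drupal\Tests\a\BigTest', 'tests_count' => 40],
];
uasort($tests, fn (array $a, array $b): int => $b['tests_count'] <=> $a['tests_count']);
// BigTest sorts first and both class-name keys survive.
print_r(array_keys($tests));
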
@@ -1083,8 +1103,8 @@ function sort_tests_by_type_and_methods(array &$tests): void {
* An array of test class names.
*/
function sort_tests_by_public_method_count(array &$tests): void {
- usort($tests, function ($a, $b) {
- return get_test_class_method_count($b) <=> get_test_class_method_count($a);
+ uasort($tests, function ($a, $b) {
+ return $b['tests_count'] <=> $a['tests_count'];
});
}
@@ -1105,28 +1125,46 @@ function get_test_type_weight(string $class): int {
}
/**
- * Get an approximate test method count for a test class.
+ * Assigns the test sequence.
*
- * @param string $class
- * The test class name.
+ * @param array $tests
+ * The array of test class info.
*/
-function get_test_class_method_count(string $class): int {
- $reflection = new \ReflectionClass($class);
- $count = 0;
- foreach ($reflection->getMethods(\ReflectionMethod::IS_PUBLIC) as $method) {
- // If a method uses a dataProvider, increase the count by 20 since data
- // providers result in a single method running multiple times.
- $comments = $method->getDocComment();
- preg_match_all('#@(.*?)\n#s', $comments, $annotations);
- foreach ($annotations[1] as $annotation) {
- if (str_starts_with($annotation, 'dataProvider')) {
- $count = $count + 20;
- continue;
- }
- }
- $count++;
+function assign_tests_sequence(array &$tests): void {
+ $i = 0;
+ foreach ($tests as &$testInfo) {
+ $testInfo['sequence'] = ++$i;
}
- return $count;
+}
+
+/**
+ * Dumps the list of tests in order of execution after sorting.
+ *
+ * @param array $tests
+ * The array of test class info.
+ * @param array $args
+ * The command line arguments.
+ */
+function dump_tests_sequence(array $tests, array $args): void {
+ if ($args['debug-discovery'] === FALSE) {
+ return;
+ }
+ echo "Test execution sequence\n";
+ echo "-----------------------\n\n";
+ echo " Seq Slow? Group Cnt Class\n";
+ echo "-----------------------------------------\n";
+ $i = 0;
+ foreach ($tests as $testInfo) {
+ echo sprintf(
+ "%4d %5s %15s %4d %s\n",
+ $testInfo['sequence'],
+ in_array('#slow', $testInfo['groups']) ? '#slow' : '',
+ trim_with_ellipsis($testInfo['group'], 15, \STR_PAD_RIGHT),
+ $testInfo['tests_count'],
+ trim_with_ellipsis($testInfo['name'], 60, \STR_PAD_LEFT),
+ );
+ }
+ echo "-----------------------------------------\n\n";
}
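
For reference, with --debug-discovery the dump above produces output shaped like this (the groups, counts, and second class are hypothetical):

Test execution sequence
-----------------------

 Seq Slow? Group Cnt Class
-----------------------------------------
   1 #slow system           102 Drupal\Tests\system\Functional\Module\InstallUninstallTest
   2       views             12 Drupal\Tests\views\Kernel\ViewsHooksTest
-----------------------------------------
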
/**
@@ -1149,13 +1187,51 @@ function place_tests_into_bins(array $tests, int $bin_count) {
// Create a bin corresponding to each parallel test job.
$bins = array_fill(0, $bin_count, []);
// Go through each test and add them to one bin at a time.
+ $i = 0;
foreach ($tests as $key => $test) {
- $bins[($key % $bin_count)][] = $test;
+ $bins[($i++ % $bin_count)][$key] = $test;
}
return $bins;
}
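
A standalone sketch of the round-robin binning above, with hypothetical keys. The class-name keys are preserved so that dump_bin_tests_sequence() can later test bin membership with isset().

<?php

$tests = array_fill_keys(['A', 'B', 'C', 'D', 'E'], []);
$bin_count = 2;
$bins = array_fill(0, $bin_count, []);
$i = 0;
foreach ($tests as $key => $test) {
  $bins[$i++ % $bin_count][$key] = $test;
}
// The input is sorted slowest-first, so alternating bins spreads the
// expected runtime: bin 0 gets A, C, E; bin 1 gets B, D.
print_r(array_keys($bins[0]));
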
/**
+ * Dumps the list of tests in order of execution for a bin.
+ *
+ * @param int $bin
+ * The 1-based number of the parallel bin.
+ * @param array $allTests
+ * The list of all test classes discovered.
+ * @param array $tests
+ * The list of test classes to run for this bin.
+ * @param array $args
+ * The command line arguments.
+ */
+function dump_bin_tests_sequence(int $bin, array $allTests, array $tests, array $args): void {
+ if ($args['debug-discovery'] === FALSE) {
+ return;
+ }
+ echo "Test execution sequence. ";
+ echo "Tests marked *** will be executed in this PARALLEL BIN #{$bin}.\n";
+ echo "-------------------------------------------------------------------------------------\n\n";
+ echo "Bin Seq Slow? Group Cnt Class\n";
+ echo "--------------------------------------------\n";
+ foreach ($allTests as $testInfo) {
+ $inBin = isset($tests[$testInfo['name']]);
+ $message = sprintf(
+ "%s %4d %5s %15s %4d %s\n",
+ $inBin ? "***" : " ",
+ $testInfo['sequence'],
+ in_array('#slow', $testInfo['groups']) ? '#slow' : '',
+ trim_with_ellipsis($testInfo['group'], 15, \STR_PAD_RIGHT),
+ $testInfo['tests_count'],
+ trim_with_ellipsis($testInfo['name'], 60, \STR_PAD_LEFT),
+ );
+ simpletest_script_print($message, $inBin ? SIMPLETEST_SCRIPT_COLOR_BRIGHT_WHITE : SIMPLETEST_SCRIPT_COLOR_GRAY);
+ }
+ echo "-------------------------------------------------\n\n";
+}
+
+/**
* Initialize the reporter.
*/
function simpletest_script_reporter_init(): void {
@@ -1170,12 +1246,6 @@ function simpletest_script_reporter_init(): void {
'debug' => 'Log',
];
- echo "\n";
- echo "Drupal test run\n";
- echo "Using PHP Binary: $php\n";
- echo "---------------\n";
- echo "\n";
-
// Tell the user about what tests are to be run.
if ($args['all']) {
echo "All tests will run.\n\n";
@@ -1378,7 +1448,7 @@ function simpletest_script_reporter_display_results(TestRunResultsStorageInterfa
function simpletest_script_format_result($result): void {
global $args, $results_map, $color;
- $summary = sprintf("%-9.9s %9.3fs %-80.80s\n", $results_map[$result->status], $result->time, trim_with_ellipsis($result->function, 80, STR_PAD_LEFT));
+ $summary = sprintf("%-9.9s %9.3fs %s\n", $results_map[$result->status], $result->time, trim_with_ellipsis($result->function, 80, STR_PAD_LEFT));
simpletest_script_print($summary, simpletest_script_color_code($result->status));
@@ -1540,12 +1610,12 @@ function simpletest_script_load_messages_by_test_id(TestRunResultsStorageInterfa
*/
function trim_with_ellipsis(string $input, int $length, int $side): string {
if (strlen($input) < $length) {
- return str_pad($input, $length, ' ', STR_PAD_RIGHT);
+ return str_pad($input, $length, ' ', \STR_PAD_RIGHT);
}
elseif (strlen($input) > $length) {
return match($side) {
- STR_PAD_RIGHT => substr($input, 0, $length - 3) . '...',
- default => '...' . substr($input, -$length + 3),
+ \STR_PAD_RIGHT => substr($input, 0, $length - 1) . '…',
+ default => '…' . substr($input, -$length + 1),
};
}
return $input;
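
A usage sketch for the updated trim_with_ellipsis(), assuming the function above is in scope. Note that strlen() counts bytes and the '…' character is three bytes in UTF-8, so a trimmed result is two bytes longer than $length:

<?php

$class = 'Drupal\Tests\system\Functional\Module\InstallUninstallTest';
echo trim_with_ellipsis($class, 40, \STR_PAD_LEFT) . "\n";
// Prints "…\Functional\Module\InstallUninstallTest".
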