Merge branch 'PHP-7.4'

commit c8e2b333cb
Nikita Popov, 2019-02-20 11:22:31 +01:00
35 changed files with 113 additions and 75 deletions

ext/curl/tests/CONFLICTS (new file)

@ -0,0 +1 @@
server

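The one-line CONFLICTS files added in this commit (such as ext/curl/tests/CONFLICTS above) declare a conflict key for every test in that directory; individual tests can instead declare keys in a --CONFLICTS-- section, as the test changes further down show. As a reading aid, here is a rough sketch of how the test runner's new code (the large hunk later in this diff) could collect these keys for one test file — the function name and structure are illustrative only, not the actual implementation:

    <?php
    // Illustrative sketch only: gather conflict keys for one .phpt file,
    // either from its --CONFLICTS-- section or from a directory-wide
    // CONFLICTS file, one key per line.
    function get_conflict_keys(string $testFile): array {
        $contents = file_get_contents($testFile);
        if (preg_match('/^--CONFLICTS--(.+?)^--/ms', $contents, $m)) {
            return array_map('trim', explode("\n", trim($m[1])));
        }
        $dirFile = dirname($testFile) . '/CONFLICTS';
        if (file_exists($dirFile)) {
            return array_map('trim', explode("\n", trim(file_get_contents($dirFile))));
        }
        return [];
    }
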
ext/ftp/tests/CONFLICTS (new file)

@ -0,0 +1 @@
server

View File

@ -0,0 +1 @@
mysql

View File

@ -4,6 +4,8 @@ Bug #66338 (Optimization binding of class constants is not safely opcacheable)
opcache.enable=0
--SKIPIF--
<?php if (!extension_loaded('Zend OPcache')) die("skip Zend OPcache extension not loaded"); ?>
--CONFLICTS--
server
--FILE--
<?php
$root = str_replace('.php', "", __FILE__);

View File

@ -7,6 +7,8 @@ phar.readonly=0
--SKIPIF--
<?php require_once('skipif.inc'); ?>
<?php if (!extension_loaded("phar")) die("skip"); ?>
--CONFLICTS--
server
--FILE--
<?php
$stub = '<?php

View File

@ -7,6 +7,8 @@ phar.readonly=0
--SKIPIF--
<?php require_once('skipif.inc'); ?>
<?php if (!extension_loaded("phar")) die("skip"); ?>
--CONFLICTS--
server
--FILE--
<?php
$stub = "<?php header('Content-Type: text/plain;');

View File

@ -6,6 +6,8 @@ opcache.enable_cli=1
opcache.revalidate_path=1
--SKIPIF--
<?php require_once('skipif.inc'); ?>
--CONFLICTS--
server
--FILE--
<?php
$dir = dirname(__FILE__);

View File

@ -0,0 +1 @@
server

View File

@ -0,0 +1 @@
mysql

View File

@ -0,0 +1 @@
pgsql

View File

@ -0,0 +1 @@
pgsql

View File

@ -0,0 +1 @@
session

View File

@ -1,5 +1,7 @@
--TEST--
Bug #73037 SoapServer reports Bad Request when gzipped, var 0
--CONFLICTS--
server
--SKIPIF--
<?php
require_once('skipif.inc');

View File

@ -2,10 +2,12 @@
Bug #42364 (Crash when using getRealPath with DirectoryIterator)
--FILE--
<?php
$it = new DirectoryIterator(dirname(__FILE__));
$dir = __DIR__ . '/bug42364';
@mkdir($dir);
touch($dir . '/test');
$count = 0;
$it = new DirectoryIterator($dir);
foreach ($it as $e) {
$count++;
$type = gettype($e->getRealPath());
@ -19,6 +21,8 @@ if ($count > 0) {
}
?>
===DONE===
--CLEAN--
<?php rmdir(__DIR__ . '/bug42364'); ?>
--EXPECTF--
Found %i entries!
===DONE===

View File

@ -0,0 +1 @@
windows_mb_path

View File

@ -0,0 +1 @@
server

View File

@ -9,6 +9,8 @@ if(!$res) {
die("skip could not open cli server script");
}
?>
--CONFLICTS--
server
--FILE--
<?php
include dirname(__FILE__)."/../../../../sapi/cli/tests/php_cli_server.inc";

View File

@ -4,6 +4,8 @@ Bug #70198 Checking liveness does not work as expected
<?php
if (getenv("SKIP_SLOW_TESTS")) die("skip slow test");
?>
--CONFLICTS--
server
--FILE--
<?php

View File

@ -6,7 +6,7 @@ Bug #69900 Commandline input/output weird behaviour with STDIO
error_reporting(E_ALL);
$fl = dirname(__FILE__) . DIRECTORY_SEPARATOR . "test69900.php";
$max_ms = ((bool)getenv('TRAVIS') || (bool)getenv('APPVEYOR')) ? 10 : 1;
$max_ms = 10;
$test_content = '<?php
@ -34,7 +34,13 @@ for($i = 0; $i < 10; $i++){
$t1 = microtime(1);
echo $s;
echo "fgets() took ", (($t1 - $t0)*1000 > $max_ms ? 'more' : 'less'), " than $max_ms ms\n";
$dt_ms = ($t1 - $t0)*1000;
if ($dt_ms > $max_ms) {
echo "fgets() took more than $max_ms ms ($dt_ms ms)\n";
} else {
echo "fgets() took less than $max_ms ms\n";
}
}
fclose($pipes[0]);
@ -51,7 +57,7 @@ $fl = dirname(__FILE__) . DIRECTORY_SEPARATOR . "test69900.php";
?>
--EXPECTF--
hello0
fgets() took more than %d ms
fgets() took more than %d ms (%s ms)
hello1
fgets() took less than %d ms
hello2

View File

@ -1,5 +1,7 @@
--TEST--
Test get_headers() function : test with context
--CONFLICTS--
server
--FILE--
<?php

View File

@ -1348,73 +1348,37 @@ function run_all_tests_parallel($test_files, $env, $redir_tested) {
echo "====⚡️==== WELCOME TO THE FUTURE: run-tests PARALLEL EDITION ====⚡️====\n";
echo "====⚡️===========================================================⚡️====\n";
// Because some of the PHP test suite has not been written with
// parallel execution in mind, it is not safe to just run any two tests
// concurrently.
// Therefore, we divide the test set into directories and test multiple
// directories at once, but not multiple tests within them.
$testDirsToGo = [];
// Each test may specify a list of conflict keys. While a test that conflicts with
// key K is running, no other test that conflicts with K may run. Conflict keys are
// specified either in the --CONFLICTS-- section, or CONFLICTS file inside a directory.
$dirConflictsWith = [];
$fileConflictsWith = [];
foreach ($test_files as $file) {
$dirSeparator = strrpos($file, DIRECTORY_SEPARATOR);
if ($dirSeparator !== FALSE) {
$testDirsToGo[substr($file, 0, $dirSeparator)][] = $file;
$contents = file_get_contents($file);
if (preg_match('/^--CONFLICTS--(.+?)^--/ms', $contents, $matches)) {
$conflicts = array_map('trim', explode("\n", trim($matches[1])));
} else {
$testDirsToGo[""][] = $file;
}
}
// We assume most test directories should be executed in serial, but for
// big directories, this would waste time if they can actually be parallel.
// Therefore, if a directory has a special '@CAN_BE_PARALLELISED' file, we
// will divide it up into smaller “directories” automatically.
foreach ($testDirsToGo as $dir => $tests) {
if (count($tests) < 64 || !is_string($dir)) {
continue;
}
if (file_exists($dir . DIRECTORY_SEPARATOR . '@CAN_BE_PARALLELISED')) {
foreach (array_chunk($tests, 64) as $testsChunk) {
$testDirsToGo[] = $testsChunk;
// Cache per-directory conflicts in a separate map, so we compute these only once.
$dir = dirname($file);
if (!isset($dirConflictsWith[$dir])) {
$dirConflicts = [];
if (file_exists($dir . '/CONFLICTS')) {
$contents = file_get_contents($dir . '/CONFLICTS');
$dirConflicts = array_map('trim', explode("\n", trim($contents)));
}
$dirConflictsWith[$dir] = $dirConflicts;
}
unset($testDirsToGo[$dir]);
}
}
// Sort test dirs so the biggest ones are handled first, so we spend less
// time waiting on workers tasked with very large dirs.
// This is an ascending sort because items are popped off the end.
// Thank you Rasmus for this idea :)
uasort($testDirsToGo, function ($a, $b) {
return count($a) <=> count($b);
});
$testDirsInProgress = 0;
echo "Isolated ", count($testDirsToGo), " directories to be tested in parallel.\n";
$shamedDirs = array_reverse(array_filter($testDirsToGo, function ($files) {
return count($files) > 100;
}), true);
if ($shamedDirs) {
$shameList = "";
foreach ($shamedDirs as $dir => $shame) {
$shameList .= "\n$dir: " . count($shame) . " files";
$conflicts = $dirConflictsWith[$dir];
}
echo <<<NAME_AND_SHAME
----⚠️-----------------------------------------------------------⚠️----
To effectively utilise parallelism, test directories should not contain
large numbers of tests that can't be run simultaneously. The following
directories contain more than 100 test files and do not contain a
'@CAN_BE_PARALLELISED' file:
$shameList
----⚠️-----------------------------------------------------------⚠️----
NAME_AND_SHAME;
$fileConflictsWith[$file] = $conflicts;
}
// Some tests assume that they are executed in a certain order. We will be popping from
// $test_files, so reverse its order here. This makes sure that order is preserved at least
// for tests with a common conflict key.
$test_files = array_reverse($test_files);
echo "Spawning workers… ";
// We use sockets rather than STDIN/STDOUT for comms because on Windows,
@ -1500,9 +1464,15 @@ NAME_AND_SHAME;
echo "\n";
$rawMessageBuffers = [];
$testsInProgress = 0;
// Map from conflict key to worker ID.
$activeConflicts = [];
// Tests waiting due to conflicts. Map from conflict key to array.
$waitingTests = [];
escape:
while ($testDirsToGo || ($testDirsInProgress > 0)) {
while ($test_files || $testsInProgress > 0) {
$toRead = array_values($workerSocks);
$toWrite = NULL;
$toExcept = NULL;
@ -1532,15 +1502,43 @@ escape:
}
switch ($message["type"]) {
case "dir_finished":
$testDirsInProgress--;
case "tests_finished":
$testsInProgress--;
foreach ($activeConflicts as $key => $workerId) {
if ($workerId === $i) {
unset($activeConflicts[$key]);
if (isset($waitingTests[$key])) {
while ($test = array_pop($waitingTests[$key])) {
$test_files[] = $test;
}
unset($waitingTests[$key]);
}
}
}
// intentional fall-through
case "ready":
if ($testDir = array_pop($testDirsToGo)) {
$testDirsInProgress++;
// Batch multiple tests to reduce communication overhead.
$files = [];
$batchSize = 32;
while (count($files) <= $batchSize && $file = array_pop($test_files)) {
foreach ($fileConflictsWith[$file] as $conflictKey) {
if (isset($activeConflicts[$conflictKey])) {
$waitingTests[$conflictKey][] = $file;
continue 2;
}
}
$files[] = $file;
}
if ($files) {
foreach ($files as $file) {
foreach ($fileConflictsWith[$file] as $conflictKey) {
$activeConflicts[$conflictKey] = $i;
}
}
$testsInProgress++;
send_message($workerSocks[$i], [
"type" => "run_tests",
"test_files" => $testDir,
"test_files" => $files,
"env" => $env,
"redir_tested" => $redir_tested
]);
@ -1609,8 +1607,8 @@ escape:
kill_children($workerProcs);
if ($testDirsInProgress < 0) {
error("$testDirsInProgress test directories “in progress”, which is less than zero. THIS SHOULD NOT HAPPEN.");
if ($testsInProgress < 0) {
error("$testsInProgress test batches “in progress”, which is less than zero. THIS SHOULD NOT HAPPEN.");
}
}
@ -1677,7 +1675,7 @@ function run_worker() {
case "run_tests":
run_all_tests($command["test_files"], $command["env"], $command["redir_tested"]);
send_message($workerSock, [
"type" => "dir_finished"
"type" => "tests_finished"
]);
break;
default:
@ -1793,7 +1791,7 @@ TEST $file
'CAPTURE_STDIO', 'STDIN', 'CGI', 'PHPDBG',
'INI', 'ENV', 'EXTENSIONS',
'SKIPIF', 'XFAIL', 'CLEAN',
'CREDITS', 'DESCRIPTION',
'CREDITS', 'DESCRIPTION', 'CONFLICTS',
))) {
$bork_info = 'Unknown section "' . $section . '"';
}
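
Because the run-tests hunk above interleaves the old per-directory scheduling with the new conflict-key scheduling, the core of the new dispatch logic is easy to lose. Below is a minimal, self-contained sketch of that logic (simplified names; batching and the socket protocol omitted). It is an illustration of the approach, not the actual run-tests code:

    <?php
    // Illustrative sketch of conflict-key scheduling, not the real implementation.
    // $queue          : tests still to run (popped from the end)
    // $conflictsOf    : test file => list of conflict keys
    // $activeConflicts: conflict key => id of the worker currently holding it
    // $waitingTests   : conflict key => tests parked until that key is released

    function pick_test_for(int $workerId, array &$queue, array $conflictsOf,
                           array &$activeConflicts, array &$waitingTests): ?string {
        while ($test = array_pop($queue)) {
            foreach ($conflictsOf[$test] as $key) {
                if (isset($activeConflicts[$key])) {
                    // Another worker holds this key: park the test, try the next one.
                    $waitingTests[$key][] = $test;
                    continue 2;
                }
            }
            // No active conflicts: claim every key for this worker and run the test.
            foreach ($conflictsOf[$test] as $key) {
                $activeConflicts[$key] = $workerId;
            }
            return $test;
        }
        return null; // nothing runnable right now
    }

    function release_worker(int $workerId, array &$queue,
                            array &$activeConflicts, array &$waitingTests): void {
        // On "tests_finished": release the worker's keys and requeue parked tests.
        foreach ($activeConflicts as $key => $holder) {
            if ($holder === $workerId) {
                unset($activeConflicts[$key]);
                foreach ($waitingTests[$key] ?? [] as $test) {
                    $queue[] = $test;
                }
                unset($waitingTests[$key]);
            }
        }
    }

Tests without conflict keys are never blocked, and tests that share a key run strictly one at a time — the property the per-test --CONFLICTS-- sections and per-directory CONFLICTS files above rely on.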

sapi/cli/tests/CONFLICTS (new file)

@ -0,0 +1 @@
server

View File

@ -2,6 +2,8 @@
php://input is empty when enable_post_data_reading=Off
--INI--
allow_url_fopen=1
--CONFLICTS--
server
--SKIPIF--
<?php
include __DIR__."/../../sapi/cli/tests/skipif.inc";

tests/security/CONFLICTS (new file)

@ -0,0 +1 @@
open_basedir