stringifiedphp.json
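The single line below is PHP's run-tests.php test runner stored verbatim as one JSON-encoded string (slashes, newlines and quotes escaped). A minimal sketch, assuming this file is saved locally as stringifiedphp.json, of decoding it back into the executable script:

<?php
// Decode stringifiedphp.json back into the original run-tests.php source.
// Assumption: the JSON document is a single string holding the full script.
$json = file_get_contents(__DIR__ . '/stringifiedphp.json');
$source = json_decode($json);
if (!is_string($source)) {
    fwrite(STDERR, "stringifiedphp.json did not decode to a single string\n");
    exit(1);
}
file_put_contents(__DIR__ . '/run-tests.php', $source);
chmod(__DIR__ . '/run-tests.php', 0755); // the script begins with a #!/usr/bin/env php shebang
echo 'Wrote ', strlen($source), " bytes to run-tests.php\n";

Once decoded, the script is invoked as described in its own synopsis, e.g. php run-tests.php -j8 Zend/tests to run that directory with eight parallel test workers.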
"#!\/usr\/bin\/env php\n<?php\n\/*\n +----------------------------------------------------------------------+\n | Copyright (c) The PHP Group |\n +----------------------------------------------------------------------+\n | This source file is subject to version 3.01 of the PHP license, |\n | that is bundled with this package in the file LICENSE, and is |\n | available through the world-wide-web at the following url: |\n | https:\/\/php.net\/license\/3_01.txt |\n | If you did not receive a copy of the PHP license and are unable to |\n | obtain it through the world-wide-web, please send a note to |\n | license@php.net so we can mail you a copy immediately. |\n +----------------------------------------------------------------------+\n | Authors: Ilia Alshanetsky <iliaa@php.net> |\n | Preston L. Bannister <pbannister@php.net> |\n | Marcus Boerger <helly@php.net> |\n | Derick Rethans <derick@php.net> |\n | Sander Roobol <sander@php.net> |\n | Andrea Faulds <ajf@ajf.me> |\n | (based on version by: Stig Bakken <ssb@php.net>) |\n | (based on the PHP 3 test framework by Rasmus Lerdorf) |\n +----------------------------------------------------------------------+\n *\/\n\n\/* $Id: f351564bff97866c63a7a8e402cb0cb2a53e642f $ *\/\n\n\/* Temporary variables while this file is being refactored. *\/\n\/** @var ?JUnit *\/\n$junit = null;\n\n\/* End temporary variables. *\/\n\n\/* Let there be no top-level code beyond this point:\n * Only functions and classes, thanks!\n *\n * Minimum required PHP version: 7.4.0\n *\/\n\nfunction show_usage(): void\n{\n echo <<<HELP\nSynopsis:\n php run-tests.php [options] [files] [directories]\n\nOptions:\n -j<workers> Run up to <workers> simultaneous testing processes in parallel for\n quicker testing on systems with multiple logical processors.\n Note that this is experimental feature.\n\n -l <file> Read the testfiles to be executed from <file>. After the test\n has finished all failed tests are written to the same <file>.\n If the list is empty and no further test is specified then\n all tests are executed (same as: -r <file> -w <file>).\n\n -r <file> Read the testfiles to be executed from <file>.\n\n -w <file> Write a list of all failed tests to <file>.\n\n -a <file> Same as -w but append rather then truncating <file>.\n\n -W <file> Write a list of all tests and their result status to <file>.\n\n -c <file> Look for php.ini in directory <file> or use <file> as ini.\n\n -n Pass -n option to the php binary (Do not use a php.ini).\n\n -d foo=bar Pass -d option to the php binary (Define INI entry foo\n with value 'bar').\n\n -g Comma separated list of groups to show during test run\n (possible values: PASS, FAIL, XFAIL, XLEAK, SKIP, BORK, WARN, LEAK, REDIRECT).\n\n -m Test for memory leaks with Valgrind (equivalent to -M memcheck).\n\n -M <tool> Test for errors with Valgrind tool.\n\n -p <php> Specify PHP executable to run.\n\n -P Use PHP_BINARY as PHP executable to run (default).\n\n -q Quiet, no user interaction (same as environment NO_INTERACTION).\n\n -s <file> Write output to <file>.\n\n -x Sets 'SKIP_SLOW_TESTS' environmental variable.\n\n --offline Sets 'SKIP_ONLINE_TESTS' environmental variable.\n\n --verbose\n -v Verbose mode.\n\n --help\n -h This Help.\n\n --temp-source <sdir> --temp-target <tdir> [--temp-urlbase <url>]\n Write temporary files to <tdir> by replacing <sdir> from the\n filenames to generate with <tdir>. 
In general you want to make\n <sdir> the path to your source files and <tdir> some patch in\n your web page hierarchy with <url> pointing to <tdir>.\n\n --keep-[all|php|skip|clean]\n Do not delete 'all' files, 'php' test file, 'skip' or 'clean'\n file.\n\n --set-timeout <n>\n Set timeout for individual tests, where <n> is the number of\n seconds. The default value is 60 seconds, or 300 seconds when\n testing for memory leaks.\n\n --context <n>\n Sets the number of lines of surrounding context to print for diffs.\n The default value is 3.\n\n --show-[all|php|skip|clean|exp|diff|out|mem]\n Show 'all' files, 'php' test file, 'skip' or 'clean' file. You\n can also use this to show the output 'out', the expected result\n 'exp', the difference between them 'diff' or the valgrind log\n 'mem'. The result types get written independent of the log format,\n however 'diff' only exists when a test fails.\n\n --show-slow <n>\n Show all tests that took longer than <n> milliseconds to run.\n\n --no-clean Do not execute clean section if any.\n\n --color\n --no-color Do\/Don't colorize the result type in the test result.\n\n --progress\n --no-progress Do\/Don't show the current progress.\n\n --repeat [n]\n Run the tests multiple times in the same process and check the\n output of the last execution (CLI SAPI only).\n\n --bless Bless failed tests using scripts\/dev\/bless_tests.php.\n\nHELP;\n}\n\n\/**\n * One function to rule them all, one function to find them, one function to\n * bring them all and in the darkness bind them.\n * This is the entry point and exit point \u00fcberfunction. It contains all the\n * code that was previously found at the top level. It could and should be\n * refactored to be smaller and more manageable.\n *\/\nfunction main(): void\n{\n \/* This list was derived in a na\u00efve mechanical fashion. 
If a member\n * looks like it doesn't belong, it probably doesn't; cull at will.\n *\/\n global $DETAILED, $PHP_FAILED_TESTS, $SHOW_ONLY_GROUPS, $argc, $argv, $cfg,\n $cfgfiles, $cfgtypes, $conf_passed, $end_time, $environment,\n $exts_skipped, $exts_tested, $exts_to_test, $failed_tests_file,\n $ignored_by_ext, $ini_overwrites, $is_switch, $colorize,\n $log_format, $matches, $no_clean, $no_file_cache,\n $optionals, $pass_option_n, $pass_options,\n $pattern_match, $php, $php_cgi, $phpdbg, $preload, $redir_tests,\n $repeat, $result_tests_file, $slow_min_ms, $start_time, $switch,\n $temp_source, $temp_target, $test_cnt, $test_dirs,\n $test_files, $test_idx, $test_list, $test_results, $testfile,\n $user_tests, $valgrind, $sum_results, $shuffle, $file_cache, $num_repeats,\n $bless, $show_progress;\n \/\/ Parallel testing\n global $workers, $workerID;\n global $context_line_count;\n\n \/\/ Temporary for the duration of refactoring\n \/** @var JUnit *\/\n global $junit;\n\n define('IS_WINDOWS', substr(PHP_OS, 0, 3) == \"WIN\");\n\n $workerID = 0;\n if (getenv(\"TEST_PHP_WORKER\")) {\n $workerID = intval(getenv(\"TEST_PHP_WORKER\"));\n run_worker();\n return;\n }\n\n define('INIT_DIR', getcwd());\n\n \/\/ Change into the PHP source directory.\n if (getenv('TEST_PHP_SRCDIR')) {\n @chdir(getenv('TEST_PHP_SRCDIR'));\n }\n\n define('TEST_PHP_SRCDIR', getcwd());\n\n check_proc_open_function_exists();\n\n \/\/ If timezone is not set, use UTC.\n if (ini_get('date.timezone') == '') {\n date_default_timezone_set('UTC');\n }\n\n \/\/ Delete some security related environment variables\n putenv('SSH_CLIENT=deleted');\n putenv('SSH_AUTH_SOCK=deleted');\n putenv('SSH_TTY=deleted');\n putenv('SSH_CONNECTION=deleted');\n\n set_time_limit(0);\n\n ini_set('pcre.backtrack_limit', PHP_INT_MAX);\n\n init_output_buffers();\n\n error_reporting(E_ALL);\n\n $environment = $_ENV ?? [];\n\n \/\/ Some configurations like php.ini-development set variables_order=\"GPCS\"\n \/\/ not \"EGPCS\", in which case $_ENV is NOT populated. Detect if the $_ENV\n \/\/ was empty and handle it by explicitly populating through getenv().\n if (empty($environment)) {\n $environment = getenv();\n }\n\n if (empty($environment['TEMP'])) {\n $environment['TEMP'] = sys_get_temp_dir();\n\n if (empty($environment['TEMP'])) {\n \/\/ For example, OpCache on Windows will fail in this case because\n \/\/ child processes (for tests) will not get a TEMP variable, so\n \/\/ GetTempPath() will fallback to c:\\windows, while GetTempPath()\n \/\/ will return %TEMP% for parent (likely a different path). 
The\n \/\/ parent will initialize the OpCache in that path, and child will\n \/\/ fail to reattach to the OpCache because it will be using the\n \/\/ wrong path.\n die(\"TEMP environment is NOT set\");\n }\n\n if (count($environment) == 1) {\n \/\/ Not having other environment variables, only having TEMP, is\n \/\/ probably ok, but strange and may make a difference in the\n \/\/ test pass rate, so warn the user.\n echo \"WARNING: Only 1 environment variable will be available to tests(TEMP environment variable)\" , PHP_EOL;\n }\n }\n\n if (IS_WINDOWS && empty($environment[\"SystemRoot\"])) {\n $environment[\"SystemRoot\"] = getenv(\"SystemRoot\");\n }\n\n $php = null;\n $php_cgi = null;\n $phpdbg = null;\n\n if (getenv('TEST_PHP_LOG_FORMAT')) {\n $log_format = strtoupper(getenv('TEST_PHP_LOG_FORMAT'));\n } else {\n $log_format = 'LEODS';\n }\n\n \/\/ Check whether a detailed log is wanted.\n if (getenv('TEST_PHP_DETAILED')) {\n $DETAILED = getenv('TEST_PHP_DETAILED');\n } else {\n $DETAILED = 0;\n }\n\n $junit = new JUnit($environment, $workerID);\n\n if (getenv('SHOW_ONLY_GROUPS')) {\n $SHOW_ONLY_GROUPS = explode(\",\", getenv('SHOW_ONLY_GROUPS'));\n } else {\n $SHOW_ONLY_GROUPS = [];\n }\n\n \/\/ Check whether user test dirs are requested.\n if (getenv('TEST_PHP_USER')) {\n $user_tests = explode(',', getenv('TEST_PHP_USER'));\n } else {\n $user_tests = [];\n }\n\n $exts_to_test = [];\n $ini_overwrites = [\n 'output_handler=',\n 'open_basedir=',\n 'disable_functions=',\n 'output_buffering=Off',\n 'error_reporting=' . E_ALL,\n 'display_errors=1',\n 'display_startup_errors=1',\n 'log_errors=0',\n 'html_errors=0',\n 'track_errors=0',\n 'report_memleaks=1',\n 'report_zend_debug=0',\n 'docref_root=',\n 'docref_ext=.html',\n 'error_prepend_string=',\n 'error_append_string=',\n 'auto_prepend_file=',\n 'auto_append_file=',\n 'ignore_repeated_errors=0',\n 'precision=14',\n 'serialize_precision=-1',\n 'memory_limit=128M',\n 'opcache.fast_shutdown=0',\n 'opcache.file_update_protection=0',\n 'opcache.revalidate_freq=0',\n 'opcache.jit_hot_loop=1',\n 'opcache.jit_hot_func=1',\n 'opcache.jit_hot_return=1',\n 'opcache.jit_hot_side_exit=1',\n 'zend.assertions=1',\n 'zend.exception_ignore_args=0',\n 'zend.exception_string_param_max_len=15',\n 'short_open_tag=0',\n ];\n\n $no_file_cache = '-d opcache.file_cache= -d opcache.file_cache_only=0';\n\n define('TRAVIS_CI', (bool) getenv('TRAVIS'));\n\n \/\/ Determine the tests to be run.\n\n $test_files = [];\n $redir_tests = [];\n $test_results = [];\n $PHP_FAILED_TESTS = [\n 'BORKED' => [],\n 'FAILED' => [],\n 'WARNED' => [],\n 'LEAKED' => [],\n 'XFAILED' => [],\n 'XLEAKED' => [],\n 'SLOW' => []\n ];\n\n \/\/ If parameters given assume they represent selected tests to run.\n $result_tests_file = false;\n $failed_tests_file = false;\n $pass_option_n = false;\n $pass_options = '';\n\n $output_file = INIT_DIR . '\/php_test_results_' . date('Ymd_Hi') . 
'.txt';\n\n $just_save_results = false;\n $valgrind = null;\n $temp_source = null;\n $temp_target = null;\n $conf_passed = null;\n $no_clean = false;\n $colorize = true;\n if (function_exists('sapi_windows_vt100_support') && !sapi_windows_vt100_support(STDOUT, true)) {\n $colorize = false;\n }\n if (array_key_exists('NO_COLOR', $environment)) {\n $colorize = false;\n }\n $selected_tests = false;\n $slow_min_ms = INF;\n $preload = false;\n $file_cache = null;\n $shuffle = false;\n $bless = false;\n $workers = null;\n $context_line_count = 3;\n $num_repeats = 1;\n $show_progress = true;\n\n $cfgtypes = ['show', 'keep'];\n $cfgfiles = ['skip', 'php', 'clean', 'out', 'diff', 'exp', 'mem'];\n $cfg = [];\n\n foreach ($cfgtypes as $type) {\n $cfg[$type] = [];\n\n foreach ($cfgfiles as $file) {\n $cfg[$type][$file] = false;\n }\n }\n\n if (!isset($argc, $argv) || !$argc) {\n $argv = [__FILE__];\n $argc = 1;\n }\n\n if (getenv('TEST_PHP_ARGS')) {\n $argv = array_merge($argv, explode(' ', getenv('TEST_PHP_ARGS')));\n $argc = count($argv);\n }\n\n for ($i = 1; $i < $argc; $i++) {\n $is_switch = false;\n $switch = substr($argv[$i], 1, 1);\n $repeat = substr($argv[$i], 0, 1) == '-';\n\n while ($repeat) {\n if (!$is_switch) {\n $switch = substr($argv[$i], 1, 1);\n }\n\n $is_switch = true;\n\n if ($repeat) {\n foreach ($cfgtypes as $type) {\n if (strpos($switch, '--' . $type) === 0) {\n foreach ($cfgfiles as $file) {\n if ($switch == '--' . $type . '-' . $file) {\n $cfg[$type][$file] = true;\n $is_switch = false;\n break;\n }\n }\n }\n }\n }\n\n if (!$is_switch) {\n $is_switch = true;\n break;\n }\n\n $repeat = false;\n\n switch ($switch) {\n case 'j':\n $workers = substr($argv[$i], 2);\n if ($workers == 0 || !preg_match('\/^\\d+$\/', $workers)) {\n error(\"'$workers' is not a valid number of workers, try e.g. 
-j16 for 16 workers\");\n }\n $workers = intval($workers, 10);\n \/\/ Don't use parallel testing infrastructure if there is only one worker.\n if ($workers === 1) {\n $workers = null;\n }\n break;\n case 'r':\n case 'l':\n $test_list = file($argv[++$i]);\n if ($test_list) {\n foreach ($test_list as $test) {\n $matches = [];\n if (preg_match('\/^#.*\\[(.*)\\]\\:\\s+(.*)$\/', $test, $matches)) {\n $redir_tests[] = [$matches[1], $matches[2]];\n } elseif (strlen($test)) {\n $test_files[] = trim($test);\n }\n }\n }\n if ($switch != 'l') {\n break;\n }\n $i--;\n \/\/ no break\n case 'w':\n $failed_tests_file = fopen($argv[++$i], 'w+t');\n break;\n case 'a':\n $failed_tests_file = fopen($argv[++$i], 'a+t');\n break;\n case 'W':\n $result_tests_file = fopen($argv[++$i], 'w+t');\n break;\n case 'c':\n $conf_passed = $argv[++$i];\n break;\n case 'd':\n $ini_overwrites[] = $argv[++$i];\n break;\n case 'g':\n $SHOW_ONLY_GROUPS = explode(\",\", $argv[++$i]);\n break;\n \/\/case 'h'\n case '--keep-all':\n foreach ($cfgfiles as $file) {\n $cfg['keep'][$file] = true;\n }\n break;\n \/\/case 'l'\n case 'm':\n $valgrind = new RuntestsValgrind($environment);\n break;\n case 'M':\n $valgrind = new RuntestsValgrind($environment, $argv[++$i]);\n break;\n case 'n':\n if (!$pass_option_n) {\n $pass_options .= ' -n';\n }\n $pass_option_n = true;\n break;\n case 'e':\n $pass_options .= ' -e';\n break;\n case '--preload':\n $preload = true;\n $environment['SKIP_PRELOAD'] = 1;\n break;\n case '--file-cache-prime':\n $file_cache = 'prime';\n break;\n case '--file-cache-use':\n $file_cache = 'use';\n break;\n case '--no-clean':\n $no_clean = true;\n break;\n case '--color':\n $colorize = true;\n break;\n case '--no-color':\n $colorize = false;\n break;\n case 'p':\n $php = $argv[++$i];\n putenv(\"TEST_PHP_EXECUTABLE=$php\");\n $environment['TEST_PHP_EXECUTABLE'] = $php;\n break;\n case 'P':\n $php = PHP_BINARY;\n putenv(\"TEST_PHP_EXECUTABLE=$php\");\n $environment['TEST_PHP_EXECUTABLE'] = $php;\n break;\n case 'q':\n putenv('NO_INTERACTION=1');\n $environment['NO_INTERACTION'] = 1;\n break;\n \/\/case 'r'\n case 's':\n $output_file = $argv[++$i];\n $just_save_results = true;\n break;\n case '--set-timeout':\n $timeout = $argv[++$i] ?? '';\n if (!preg_match('\/^\\d+$\/', $timeout)) {\n error(\"'$timeout' is not a valid number of seconds, try e.g. --set-timeout 60 for 1 minute\");\n }\n $environment['TEST_TIMEOUT'] = intval($timeout, 10);\n break;\n case '--context':\n $context_line_count = $argv[++$i] ?? '';\n if (!preg_match('\/^\\d+$\/', $context_line_count)) {\n error(\"'$context_line_count' is not a valid number of lines of context, try e.g. --context 3 for 3 lines\");\n }\n $context_line_count = intval($context_line_count, 10);\n break;\n case '--show-all':\n foreach ($cfgfiles as $file) {\n $cfg['show'][$file] = true;\n }\n break;\n case '--show-slow':\n $slow_min_ms = $argv[++$i] ?? '';\n if (!preg_match('\/^\\d+$\/', $slow_min_ms)) {\n error(\"'$slow_min_ms' is not a valid number of milliseconds, try e.g. 
--show-slow 1000 for 1 second\");\n }\n $slow_min_ms = intval($slow_min_ms, 10);\n break;\n case '--temp-source':\n $temp_source = $argv[++$i];\n break;\n case '--temp-target':\n $temp_target = $argv[++$i];\n break;\n case 'v':\n case '--verbose':\n $DETAILED = true;\n break;\n case 'x':\n $environment['SKIP_SLOW_TESTS'] = 1;\n break;\n case '--offline':\n $environment['SKIP_ONLINE_TESTS'] = 1;\n break;\n case '--shuffle':\n $shuffle = true;\n break;\n case '--asan':\n case '--msan':\n $environment['USE_ZEND_ALLOC'] = 0;\n $environment['USE_TRACKED_ALLOC'] = 1;\n $environment['SKIP_ASAN'] = 1;\n $environment['SKIP_PERF_SENSITIVE'] = 1;\n if ($switch === '--msan') {\n $environment['SKIP_MSAN'] = 1;\n }\n\n $lsanSuppressions = __DIR__ . '\/azure\/lsan-suppressions.txt';\n if (file_exists($lsanSuppressions)) {\n $environment['LSAN_OPTIONS'] = 'suppressions=' . $lsanSuppressions\n . ':print_suppressions=0';\n }\n break;\n case '--repeat':\n $num_repeats = (int) $argv[++$i];\n $environment['SKIP_REPEAT'] = 1;\n break;\n case '--bless':\n $bless = true;\n break;\n \/\/case 'w'\n case '-':\n \/\/ repeat check with full switch\n $switch = $argv[$i];\n if ($switch != '-') {\n $repeat = true;\n }\n break;\n case '--progress':\n $show_progress = true;\n break;\n case '--no-progress':\n $show_progress = false;\n break;\n case '--version':\n echo '$Id: f351564bff97866c63a7a8e402cb0cb2a53e642f $' . \"\\n\";\n exit(1);\n\n default:\n echo \"Illegal switch '$switch' specified!\\n\";\n \/\/ no break\n case 'h':\n case '-help':\n case '--help':\n show_usage();\n exit(1);\n }\n }\n\n if (!$is_switch) {\n $selected_tests = true;\n $testfile = realpath($argv[$i]);\n\n if (!$testfile && strpos($argv[$i], '*') !== false && function_exists('glob')) {\n if (substr($argv[$i], -5) == '.phpt') {\n $pattern_match = glob($argv[$i]);\n } elseif (preg_match(\"\/\\*$\/\", $argv[$i])) {\n $pattern_match = glob($argv[$i] . '.phpt');\n } else {\n die('Cannot find test file \"' . $argv[$i] . '\".' . PHP_EOL);\n }\n\n if (is_array($pattern_match)) {\n $test_files = array_merge($test_files, $pattern_match);\n }\n } elseif (is_dir($testfile)) {\n find_files($testfile);\n } elseif (substr($testfile, -5) == '.phpt') {\n $test_files[] = $testfile;\n } else {\n die('Cannot find test file \"' . $argv[$i] . '\".' . PHP_EOL);\n }\n }\n }\n\n if ($selected_tests && count($test_files) === 0) {\n echo \"No tests found.\\n\";\n return;\n }\n\n if (!$php) {\n $php = getenv('TEST_PHP_EXECUTABLE');\n }\n if (!$php) {\n $php = PHP_BINARY;\n }\n\n if (!$php_cgi) {\n $php_cgi = getenv('TEST_PHP_CGI_EXECUTABLE');\n }\n if (!$php_cgi) {\n $php_cgi = get_binary($php, 'php-cgi', 'sapi\/cgi\/php-cgi');\n }\n\n if (!$phpdbg) {\n $phpdbg = getenv('TEST_PHPDBG_EXECUTABLE');\n }\n if (!$phpdbg) {\n $phpdbg = get_binary($php, 'phpdbg', 'sapi\/phpdbg\/phpdbg');\n }\n\n putenv(\"TEST_PHP_EXECUTABLE=$php\");\n $environment['TEST_PHP_EXECUTABLE'] = $php;\n putenv(\"TEST_PHP_CGI_EXECUTABLE=$php_cgi\");\n $environment['TEST_PHP_CGI_EXECUTABLE'] = $php_cgi;\n putenv(\"TEST_PHPDBG_EXECUTABLE=$phpdbg\");\n $environment['TEST_PHPDBG_EXECUTABLE'] = $phpdbg;\n\n if ($conf_passed !== null) {\n if (IS_WINDOWS) {\n $pass_options .= \" -c \" . escapeshellarg($conf_passed);\n } else {\n $pass_options .= \" -c '\" . realpath($conf_passed) . 
\"'\";\n }\n }\n\n $test_files = array_unique($test_files);\n $test_files = array_merge($test_files, $redir_tests);\n\n \/\/ Run selected tests.\n $test_cnt = count($test_files);\n\n verify_config();\n write_information();\n\n if ($test_cnt) {\n putenv('NO_INTERACTION=1');\n usort($test_files, \"test_sort\");\n $start_time = time();\n\n echo \"Running selected tests.\\n\";\n\n $test_idx = 0;\n run_all_tests($test_files, $environment);\n $end_time = time();\n\n if ($failed_tests_file) {\n fclose($failed_tests_file);\n }\n\n if ($result_tests_file) {\n fclose($result_tests_file);\n }\n\n if (0 == count($test_results)) {\n echo \"No tests were run.\\n\";\n return;\n }\n\n compute_summary();\n echo \"=====================================================================\";\n echo get_summary(false);\n\n if ($output_file != '' && $just_save_results) {\n save_results($output_file, \/* prompt_to_save_results: *\/ false);\n }\n } else {\n \/\/ Compile a list of all test files (*.phpt).\n $test_files = [];\n $exts_tested = count($exts_to_test);\n $exts_skipped = 0;\n $ignored_by_ext = 0;\n sort($exts_to_test);\n $test_dirs = [];\n $optionals = ['Zend', 'tests', 'ext', 'sapi'];\n\n foreach ($optionals as $dir) {\n if (is_dir($dir)) {\n $test_dirs[] = $dir;\n }\n }\n\n \/\/ Convert extension names to lowercase\n foreach ($exts_to_test as $key => $val) {\n $exts_to_test[$key] = strtolower($val);\n }\n\n foreach ($test_dirs as $dir) {\n find_files(TEST_PHP_SRCDIR . \"\/{$dir}\", $dir == 'ext');\n }\n\n foreach ($user_tests as $dir) {\n find_files($dir, $dir == 'ext');\n }\n\n $test_files = array_unique($test_files);\n usort($test_files, \"test_sort\");\n\n $start_time = time();\n show_start($start_time);\n\n $test_cnt = count($test_files);\n $test_idx = 0;\n run_all_tests($test_files, $environment);\n $end_time = time();\n\n if ($failed_tests_file) {\n fclose($failed_tests_file);\n }\n\n if ($result_tests_file) {\n fclose($result_tests_file);\n }\n\n \/\/ Summarize results\n\n if (0 == count($test_results)) {\n echo \"No tests were run.\\n\";\n return;\n }\n\n compute_summary();\n\n show_end($end_time);\n show_summary();\n\n save_results($output_file, \/* prompt_to_save_results: *\/ true);\n }\n\n $junit->saveXML();\n if ($bless) {\n bless_failed_tests($PHP_FAILED_TESTS['FAILED']);\n }\n if (getenv('REPORT_EXIT_STATUS') !== '0' && getenv('REPORT_EXIT_STATUS') !== 'no' &&\n ($sum_results['FAILED'] || $sum_results['BORKED'] || $sum_results['LEAKED'])) {\n exit(1);\n }\n}\n\nif (!function_exists(\"hrtime\")) {\n \/**\n * @return array|float|int\n *\/\n function hrtime(bool $as_num = false)\n {\n $t = microtime(true);\n\n if ($as_num) {\n return $t * 1000000000;\n }\n\n $s = floor($t);\n return [0 => $s, 1 => ($t - $s) * 1000000000];\n }\n}\n\nfunction verify_config(): void\n{\n global $php;\n\n if (empty($php) || !file_exists($php)) {\n error('environment variable TEST_PHP_EXECUTABLE must be set to specify PHP executable!');\n }\n\n if (!is_executable($php)) {\n error(\"invalid PHP executable specified by TEST_PHP_EXECUTABLE = $php\");\n }\n}\n\nfunction write_information(): void\n{\n global $php, $php_cgi, $phpdbg, $php_info, $user_tests, $ini_overwrites, $pass_options, $exts_to_test, $valgrind, $no_file_cache;\n\n \/\/ Get info from php\n $info_file = __DIR__ . 
'\/run-test-info.php';\n @unlink($info_file);\n $php_info = '<?php echo \"\nPHP_SAPI : \" , PHP_SAPI , \"\nPHP_VERSION : \" , phpversion() , \"\nZEND_VERSION: \" , zend_version() , \"\nPHP_OS : \" , PHP_OS , \" - \" , php_uname() , \"\nINI actual : \" , realpath(get_cfg_var(\"cfg_file_path\")) , \"\nMore .INIs : \" , (function_exists(\\'php_ini_scanned_files\\') ? str_replace(\"\\n\",\"\", php_ini_scanned_files()) : \"** not determined **\"); ?>';\n save_text($info_file, $php_info);\n $info_params = [];\n settings2array($ini_overwrites, $info_params);\n $info_params = settings2params($info_params);\n $php_info = shell_exec(\"$php $pass_options $info_params $no_file_cache \\\"$info_file\\\"\");\n define('TESTED_PHP_VERSION', shell_exec(\"$php -n -r \\\"echo PHP_VERSION;\\\"\"));\n\n if ($php_cgi && $php != $php_cgi) {\n $php_info_cgi = shell_exec(\"$php_cgi $pass_options $info_params $no_file_cache -q \\\"$info_file\\\"\");\n $php_info_sep = \"\\n---------------------------------------------------------------------\";\n $php_cgi_info = \"$php_info_sep\\nPHP : $php_cgi $php_info_cgi$php_info_sep\";\n } else {\n $php_cgi_info = '';\n }\n\n if ($phpdbg) {\n $phpdbg_info = shell_exec(\"$phpdbg $pass_options $info_params $no_file_cache -qrr \\\"$info_file\\\"\");\n $php_info_sep = \"\\n---------------------------------------------------------------------\";\n $phpdbg_info = \"$php_info_sep\\nPHP : $phpdbg $phpdbg_info$php_info_sep\";\n } else {\n $phpdbg_info = '';\n }\n\n if (function_exists('opcache_invalidate')) {\n opcache_invalidate($info_file, true);\n }\n @unlink($info_file);\n\n \/\/ load list of enabled and loadable extensions\n save_text($info_file, <<<'PHP'\n <?php\n echo str_replace(\"Zend OPcache\", \"opcache\", implode(\",\", get_loaded_extensions()));\n $ext_dir = ini_get(\"extension_dir\");\n foreach (scandir($ext_dir) as $file) {\n if (!preg_match('\/^(?:php_)?([_a-zA-Z0-9]+)\\.(?:so|dll)$\/', $file, $matches)) {\n continue;\n }\n $ext = $matches[1];\n if (!extension_loaded($ext) && @dl($file)) {\n echo \",\", $ext;\n }\n }\n ?>\n PHP);\n $exts_to_test = explode(',', shell_exec(\"$php $pass_options $info_params $no_file_cache \\\"$info_file\\\"\"));\n \/\/ check for extensions that need special handling and regenerate\n $info_params_ex = [\n 'session' => ['session.auto_start=0'],\n 'tidy' => ['tidy.clean_output=0'],\n 'zlib' => ['zlib.output_compression=Off'],\n 'xdebug' => ['xdebug.mode=off'],\n ];\n\n foreach ($info_params_ex as $ext => $ini_overwrites_ex) {\n if (in_array($ext, $exts_to_test)) {\n $ini_overwrites = array_merge($ini_overwrites, $ini_overwrites_ex);\n }\n }\n\n if (function_exists('opcache_invalidate')) {\n opcache_invalidate($info_file, true);\n }\n @unlink($info_file);\n\n \/\/ Write test context information.\n echo \"\n=====================================================================\nPHP : $php $php_info $php_cgi_info $phpdbg_info\nCWD : \" . TEST_PHP_SRCDIR . \"\nExtra dirs : \";\n foreach ($user_tests as $test_dir) {\n echo \"{$test_dir}\\n\t\t\t \";\n }\n echo \"\nVALGRIND : \" . ($valgrind ? $valgrind->getHeader() : 'Not used') . 
\"\n=====================================================================\n\";\n}\n\nfunction save_results(string $output_file, bool $prompt_to_save_results): void\n{\n global $sum_results, $failed_test_summary,\n $PHP_FAILED_TESTS, $php;\n\n if (getenv('NO_INTERACTION') || TRAVIS_CI) {\n return;\n }\n\n if ($prompt_to_save_results) {\n \/* We got failed Tests, offer the user to save a QA report *\/\n $fp = fopen(\"php:\/\/stdin\", \"r+\");\n if ($sum_results['FAILED'] || $sum_results['BORKED'] || $sum_results['WARNED'] || $sum_results['LEAKED']) {\n echo \"\\nYou may have found a problem in PHP.\";\n }\n echo \"\\nThis report can be saved and used to open an issue on the bug tracker at\\n\";\n echo \"https:\/\/github.com\/php\/php-src\/issues\\n\";\n echo \"This gives us a better understanding of PHP's behavior.\\n\";\n echo \"Do you want to save this report in a file? [Yn]: \";\n flush();\n\n $user_input = fgets($fp, 10);\n fclose($fp);\n if (!(strlen(trim($user_input)) == 0 || strtolower($user_input[0]) == 'y')) {\n return;\n }\n }\n \/**\n * Collect information about the host system for our report\n * Fetch phpinfo() output so that we can see the PHP environment\n * Make an archive of all the failed tests\n *\/\n $failed_tests_data = '';\n $sep = \"\\n\" . str_repeat('=', 80) . \"\\n\";\n $failed_tests_data .= $failed_test_summary . \"\\n\";\n $failed_tests_data .= get_summary(true) . \"\\n\";\n\n if ($sum_results['FAILED']) {\n foreach ($PHP_FAILED_TESTS['FAILED'] as $test_info) {\n $failed_tests_data .= $sep . $test_info['name'] . $test_info['info'];\n $failed_tests_data .= $sep . file_get_contents(realpath($test_info['output']));\n $failed_tests_data .= $sep . file_get_contents(realpath($test_info['diff']));\n $failed_tests_data .= $sep . \"\\n\\n\";\n }\n }\n\n $failed_tests_data .= \"\\n\" . $sep . 'BUILD ENVIRONMENT' . $sep;\n $failed_tests_data .= \"OS:\\n\" . PHP_OS . \" - \" . php_uname() . \"\\n\\n\";\n $ldd = $autoconf = $sys_libtool = $libtool = $compiler = 'N\/A';\n\n if (!IS_WINDOWS) {\n \/* If PHP_AUTOCONF is set, use it; otherwise, use 'autoconf'. *\/\n if (getenv('PHP_AUTOCONF')) {\n $autoconf = shell_exec(getenv('PHP_AUTOCONF') . ' --version');\n } else {\n $autoconf = shell_exec('autoconf --version');\n }\n\n \/* Always use the generated libtool - Mac OSX uses 'glibtool' *\/\n $libtool = shell_exec(INIT_DIR . '\/libtool --version');\n\n \/* Use shtool to find out if there is glibtool present (MacOSX) *\/\n $sys_libtool_path = shell_exec(__DIR__ . '\/build\/shtool path glibtool libtool');\n\n if ($sys_libtool_path) {\n $sys_libtool = shell_exec(str_replace(\"\\n\", \"\", $sys_libtool_path) . ' --version');\n }\n\n \/* Try the most common flags for 'version' *\/\n $flags = ['-v', '-V', '--version'];\n $cc_status = 0;\n\n foreach ($flags as $flag) {\n system(getenv('CC') . \" $flag >\/dev\/null 2>&1\", $cc_status);\n if ($cc_status == 0) {\n $compiler = shell_exec(getenv('CC') . \" $flag 2>&1\");\n break;\n }\n }\n\n $ldd = shell_exec(\"ldd $php 2>\/dev\/null\");\n }\n\n $failed_tests_data .= \"Autoconf:\\n$autoconf\\n\";\n $failed_tests_data .= \"Bundled Libtool:\\n$libtool\\n\";\n $failed_tests_data .= \"System Libtool:\\n$sys_libtool\\n\";\n $failed_tests_data .= \"Compiler:\\n$compiler\\n\";\n $failed_tests_data .= \"Bison:\\n\" . shell_exec('bison --version 2>\/dev\/null') . \"\\n\";\n $failed_tests_data .= \"Libraries:\\n$ldd\\n\";\n $failed_tests_data .= \"\\n\";\n $failed_tests_data .= $sep . \"PHPINFO\" . $sep;\n $failed_tests_data .= shell_exec($php . 
' -ddisplay_errors=stderr -dhtml_errors=0 -i 2> \/dev\/null');\n\n file_put_contents($output_file, $failed_tests_data);\n echo \"Report saved to: \", $output_file, \"\\n\";\n}\n\nfunction get_binary(string $php, string $sapi, string $sapi_path): ?string\n{\n $dir = dirname($php);\n if (IS_WINDOWS && file_exists(\"$dir\/$sapi.exe\")) {\n return realpath(\"$dir\/$sapi.exe\");\n }\n \/\/ Sources tree\n if (file_exists(\"$dir\/..\/..\/$sapi_path\")) {\n return realpath(\"$dir\/..\/..\/$sapi_path\");\n }\n \/\/ Installation tree, preserve command prefix\/suffix\n $inst = str_replace('php', $sapi, basename($php));\n if (file_exists(\"$dir\/$inst\")) {\n return realpath(\"$dir\/$inst\");\n }\n return null;\n}\n\nfunction find_files(string $dir, bool $is_ext_dir = false, bool $ignore = false): void\n{\n global $test_files, $exts_to_test, $ignored_by_ext, $exts_skipped;\n\n $o = opendir($dir) or error(\"cannot open directory: $dir\");\n\n while (($name = readdir($o)) !== false) {\n if (is_dir(\"{$dir}\/{$name}\") && !in_array($name, ['.', '..', '.svn'])) {\n $skip_ext = ($is_ext_dir && !in_array(strtolower($name), $exts_to_test));\n if ($skip_ext) {\n $exts_skipped++;\n }\n find_files(\"{$dir}\/{$name}\", false, $ignore || $skip_ext);\n }\n\n \/\/ Cleanup any left-over tmp files from last run.\n if (substr($name, -4) == '.tmp') {\n @unlink(\"$dir\/$name\");\n continue;\n }\n\n \/\/ Otherwise we're only interested in *.phpt files.\n \/\/ (but not those starting with a dot, which are hidden on\n \/\/ many platforms)\n if (substr($name, -5) == '.phpt' && substr($name, 0, 1) !== '.') {\n if ($ignore) {\n $ignored_by_ext++;\n } else {\n $testfile = realpath(\"{$dir}\/{$name}\");\n $test_files[] = $testfile;\n }\n }\n }\n\n closedir($o);\n}\n\n\/**\n * @param array|string $name\n *\/\nfunction test_name($name): string\n{\n if (is_array($name)) {\n return $name[0] . ':' . $name[1];\n }\n\n return $name;\n}\n\/**\n * @param array|string $a\n * @param array|string $b\n *\/\nfunction test_sort($a, $b): int\n{\n $a = test_name($a);\n $b = test_name($b);\n\n $ta = strpos($a, TEST_PHP_SRCDIR . \"\/tests\") === 0 ? 1 + (strpos($a,\n TEST_PHP_SRCDIR . \"\/tests\/run-test\") === 0 ? 1 : 0) : 0;\n $tb = strpos($b, TEST_PHP_SRCDIR . \"\/tests\") === 0 ? 1 + (strpos($b,\n TEST_PHP_SRCDIR . \"\/tests\/run-test\") === 0 ? 1 : 0) : 0;\n\n if ($ta == $tb) {\n return strcmp($a, $b);\n }\n\n return $tb - $ta;\n}\n\n\/\/\n\/\/ Write the given text to a temporary file, and return the filename.\n\/\/\n\nfunction save_text(string $filename, string $text, ?string $filename_copy = null): void\n{\n global $DETAILED;\n\n if ($filename_copy && $filename_copy != $filename && file_put_contents($filename_copy, $text) === false) {\n error(\"Cannot open file '\" . $filename_copy . \"' (save_text)\");\n }\n\n if (file_put_contents($filename, $text) === false) {\n error(\"Cannot open file '\" . $filename . \"' (save_text)\");\n }\n\n if (1 < $DETAILED) {\n echo \"\nFILE $filename {{{\n$text\n}}}\n\";\n }\n}\n\n\/\/\n\/\/ Write an error in a format recognizable to Emacs or MSVC.\n\/\/\n\nfunction error_report(string $testname, string $logname, string $tested): void\n{\n $testname = realpath($testname);\n $logname = realpath($logname);\n\n switch (strtoupper(getenv('TEST_PHP_ERROR_STYLE'))) {\n case 'MSVC':\n echo $testname . \"(1) : $tested\\n\";\n echo $logname . \"(1) : $tested\\n\";\n break;\n case 'EMACS':\n echo $testname . \":1: $tested\\n\";\n echo $logname . 
\":1: $tested\\n\";\n break;\n }\n}\n\n\/**\n * @return false|string\n *\/\nfunction system_with_timeout(\n string $commandline,\n ?array $env = null,\n ?string $stdin = null,\n bool $captureStdIn = true,\n bool $captureStdOut = true,\n bool $captureStdErr = true\n) {\n global $valgrind;\n\n $data = '';\n\n $bin_env = [];\n foreach ((array) $env as $key => $value) {\n $bin_env[$key] = $value;\n }\n\n $descriptorspec = [];\n if ($captureStdIn) {\n $descriptorspec[0] = ['pipe', 'r'];\n }\n if ($captureStdOut) {\n $descriptorspec[1] = ['pipe', 'w'];\n }\n if ($captureStdErr) {\n $descriptorspec[2] = ['pipe', 'w'];\n }\n $proc = proc_open($commandline, $descriptorspec, $pipes, TEST_PHP_SRCDIR, $bin_env, ['suppress_errors' => true]);\n\n if (!$proc) {\n return false;\n }\n\n if ($captureStdIn) {\n if (!is_null($stdin)) {\n fwrite($pipes[0], $stdin);\n }\n fclose($pipes[0]);\n unset($pipes[0]);\n }\n\n $timeout = $valgrind ? 300 : ($env['TEST_TIMEOUT'] ?? 60);\n\n while (true) {\n \/* hide errors from interrupted syscalls *\/\n $r = $pipes;\n $w = null;\n $e = null;\n\n $n = @stream_select($r, $w, $e, $timeout);\n\n if ($n === false) {\n break;\n }\n\n if ($n === 0) {\n \/* timed out *\/\n $data .= \"\\n ** ERROR: process timed out **\\n\";\n proc_terminate($proc, 9);\n return $data;\n }\n\n if ($n > 0) {\n if ($captureStdOut) {\n $line = fread($pipes[1], 8192);\n } elseif ($captureStdErr) {\n $line = fread($pipes[2], 8192);\n } else {\n $line = '';\n }\n if (strlen($line) == 0) {\n \/* EOF *\/\n break;\n }\n $data .= $line;\n }\n }\n\n $stat = proc_get_status($proc);\n\n if ($stat['signaled']) {\n $data .= \"\\nTermsig=\" . $stat['stopsig'] . \"\\n\";\n }\n if ($stat[\"exitcode\"] > 128 && $stat[\"exitcode\"] < 160) {\n $data .= \"\\nTermsig=\" . ($stat[\"exitcode\"] - 128) . \"\\n\";\n } else if (defined('PHP_WINDOWS_VERSION_MAJOR') && (($stat[\"exitcode\"] >> 28) & 0b1111) === 0b1100) {\n \/\/ https:\/\/docs.microsoft.com\/en-us\/openspecs\/windows_protocols\/ms-erref\/87fba13e-bf06-450e-83b1-9241dc81e781\n $data .= \"\\nTermsig=\" . $stat[\"exitcode\"] . \"\\n\";\n }\n\n proc_close($proc);\n return $data;\n}\n\n\/**\n * @param string|array|null $redir_tested\n *\/\nfunction run_all_tests(array $test_files, array $env, $redir_tested = null): void\n{\n global $test_results, $failed_tests_file, $result_tests_file, $php, $test_idx, $file_cache;\n global $preload;\n \/\/ Parallel testing\n global $PHP_FAILED_TESTS, $workers, $workerID, $workerSock;\n\n if ($file_cache !== null || $preload) {\n \/* Automatically skip opcache tests in --file-cache and --preload mode,\n * because opcache generally expects these to run under a default configuration. *\/\n $test_files = array_filter($test_files, function($test) use($preload) {\n if (!is_string($test)) {\n return true;\n }\n if (false !== strpos($test, 'ext\/opcache')) {\n return false;\n }\n if ($preload && false !== strpos($test, 'ext\/zend_test\/tests\/observer')) {\n return false;\n }\n return true;\n });\n }\n\n \/* Ignore -jN if there is only one file to analyze. 
*\/\n if ($workers !== null && count($test_files) > 1 && !$workerID) {\n run_all_tests_parallel($test_files, $env, $redir_tested);\n return;\n }\n\n foreach ($test_files as $name) {\n if (is_array($name)) {\n $index = \"# $name[1]: $name[0]\";\n\n if ($redir_tested) {\n $name = $name[0];\n }\n } elseif ($redir_tested) {\n $index = \"# $redir_tested: $name\";\n } else {\n $index = $name;\n }\n $test_idx++;\n\n if ($workerID) {\n $PHP_FAILED_TESTS = ['BORKED' => [], 'FAILED' => [], 'WARNED' => [], 'LEAKED' => [], 'XFAILED' => [], 'XLEAKED' => [], 'SLOW' => []];\n ob_start();\n }\n\n $result = run_test($php, $name, $env);\n if ($workerID) {\n $resultText = ob_get_clean();\n }\n\n if (!is_array($name) && $result != 'REDIR') {\n if ($workerID) {\n send_message($workerSock, [\n \"type\" => \"test_result\",\n \"name\" => $name,\n \"index\" => $index,\n \"result\" => $result,\n \"text\" => $resultText,\n \"PHP_FAILED_TESTS\" => $PHP_FAILED_TESTS\n ]);\n continue;\n }\n\n $test_results[$index] = $result;\n if ($failed_tests_file && ($result == 'XFAILED' || $result == 'XLEAKED' || $result == 'FAILED' || $result == 'WARNED' || $result == 'LEAKED')) {\n fwrite($failed_tests_file, \"$index\\n\");\n }\n if ($result_tests_file) {\n fwrite($result_tests_file, \"$result\\t$index\\n\");\n }\n }\n }\n}\n\n\/** The heart of parallel testing.\n * @param string|array|null $redir_tested\n *\/\nfunction run_all_tests_parallel(array $test_files, array $env, $redir_tested): void\n{\n global $workers, $test_idx, $test_cnt, $test_results, $failed_tests_file, $result_tests_file, $PHP_FAILED_TESTS, $shuffle, $SHOW_ONLY_GROUPS, $valgrind, $show_progress;\n\n global $junit;\n\n \/\/ The PHP binary running run-tests.php, and run-tests.php itself\n \/\/ This PHP executable is *not* necessarily the same as the tested version\n $thisPHP = PHP_BINARY;\n $thisScript = __FILE__;\n\n $workerProcs = [];\n $workerSocks = [];\n\n \/\/ Each test may specify a list of conflict keys. While a test that conflicts with\n \/\/ key K is running, no other test that conflicts with K may run. Conflict keys are\n \/\/ specified either in the --CONFLICTS-- section, or CONFLICTS file inside a directory.\n $dirConflictsWith = [];\n $fileConflictsWith = [];\n $sequentialTests = [];\n foreach ($test_files as $i => $file) {\n $contents = file_get_contents($file);\n if (preg_match('\/^--CONFLICTS--(.+?)^--\/ms', $contents, $matches)) {\n $conflicts = parse_conflicts($matches[1]);\n } else {\n \/\/ Cache per-directory conflicts in a separate map, so we compute these only once.\n $dir = dirname($file);\n if (!isset($dirConflictsWith[$dir])) {\n $dirConflicts = [];\n if (file_exists($dir . '\/CONFLICTS')) {\n $contents = file_get_contents($dir . '\/CONFLICTS');\n $dirConflicts = parse_conflicts($contents);\n }\n $dirConflictsWith[$dir] = $dirConflicts;\n }\n $conflicts = $dirConflictsWith[$dir];\n }\n\n \/\/ For tests conflicting with \"all\", no other tests may run in parallel. We'll run these\n \/\/ tests separately at the end, when only one worker is left.\n if (in_array('all', $conflicts, true)) {\n $sequentialTests[] = $file;\n unset($test_files[$i]);\n }\n\n $fileConflictsWith[$file] = $conflicts;\n }\n\n \/\/ Some tests assume that they are executed in a certain order. We will be popping from\n \/\/ $test_files, so reverse its order here. 
This makes sure that order is preserved at least\n \/\/ for tests with a common conflict key.\n $test_files = array_reverse($test_files);\n\n \/\/ To discover parallelization issues it is useful to randomize the test order.\n if ($shuffle) {\n shuffle($test_files);\n }\n\n \/\/ Don't start more workers than test files.\n $workers = max(1, min($workers, count($test_files)));\n\n echo \"Spawning $workers workers... \";\n\n \/\/ We use sockets rather than STDIN\/STDOUT for comms because on Windows,\n \/\/ those can't be non-blocking for some reason.\n $listenSock = stream_socket_server(\"tcp:\/\/127.0.0.1:0\") or error(\"Couldn't create socket on localhost.\");\n $sockName = stream_socket_get_name($listenSock, false);\n \/\/ PHP is terrible and returns IPv6 addresses not enclosed by []\n $portPos = strrpos($sockName, \":\");\n $sockHost = substr($sockName, 0, $portPos);\n if (false !== strpos($sockHost, \":\")) {\n $sockHost = \"[$sockHost]\";\n }\n $sockPort = substr($sockName, $portPos + 1);\n $sockUri = \"tcp:\/\/$sockHost:$sockPort\";\n $totalFileCount = count($test_files);\n\n $startTime = microtime(true);\n for ($i = 1; $i <= $workers; $i++) {\n $proc = proc_open(\n [$thisPHP, $thisScript],\n [], \/\/ Inherit our stdin, stdout and stderr\n $pipes,\n null,\n $GLOBALS['environment'] + [\n \"TEST_PHP_WORKER\" => $i,\n \"TEST_PHP_URI\" => $sockUri,\n ],\n [\n \"suppress_errors\" => true,\n 'create_new_console' => true,\n ]\n );\n if ($proc === false) {\n kill_children($workerProcs);\n error(\"Failed to spawn worker $i\");\n }\n $workerProcs[$i] = $proc;\n }\n\n for ($i = 1; $i <= $workers; $i++) {\n $workerSock = stream_socket_accept($listenSock, 5);\n if ($workerSock === false) {\n kill_children($workerProcs);\n error(\"Failed to accept connection from worker.\");\n }\n\n $greeting = base64_encode(serialize([\n \"type\" => \"hello\",\n \"GLOBALS\" => $GLOBALS,\n \"constants\" => [\n \"INIT_DIR\" => INIT_DIR,\n \"TEST_PHP_SRCDIR\" => TEST_PHP_SRCDIR,\n \"TRAVIS_CI\" => TRAVIS_CI\n ]\n ])) . \"\\n\";\n\n stream_set_timeout($workerSock, 5);\n if (fwrite($workerSock, $greeting) === false) {\n kill_children($workerProcs);\n error(\"Failed to send greeting to worker.\");\n }\n\n $rawReply = fgets($workerSock);\n if ($rawReply === false) {\n kill_children($workerProcs);\n error(\"Failed to read greeting reply from worker.\");\n }\n\n $reply = unserialize(base64_decode($rawReply));\n if (!$reply || $reply[\"type\"] !== \"hello_reply\") {\n kill_children($workerProcs);\n error(\"Greeting reply from worker unexpected or could not be decoded: '$rawReply'\");\n }\n\n stream_set_timeout($workerSock, 0);\n stream_set_blocking($workerSock, false);\n\n $workerID = $reply[\"workerID\"];\n $workerSocks[$workerID] = $workerSock;\n }\n printf(\"Done in %.2fs\\n\", microtime(true) - $startTime);\n echo \"=====================================================================\\n\";\n echo \"\\n\";\n\n $rawMessageBuffers = [];\n $testsInProgress = 0;\n\n \/\/ Map from conflict key to worker ID.\n $activeConflicts = [];\n \/\/ Tests waiting due to conflicts. 
Map from conflict key to array.\n $waitingTests = [];\n\nescape:\n while ($test_files || $sequentialTests || $testsInProgress > 0) {\n $toRead = array_values($workerSocks);\n $toWrite = null;\n $toExcept = null;\n if (stream_select($toRead, $toWrite, $toExcept, 10)) {\n foreach ($toRead as $workerSock) {\n $i = array_search($workerSock, $workerSocks);\n if ($i === false) {\n kill_children($workerProcs);\n error(\"Could not find worker stdout in array of worker stdouts, THIS SHOULD NOT HAPPEN.\");\n }\n while (false !== ($rawMessage = fgets($workerSock))) {\n \/\/ work around fgets truncating things\n if (($rawMessageBuffers[$i] ?? '') !== '') {\n $rawMessage = $rawMessageBuffers[$i] . $rawMessage;\n $rawMessageBuffers[$i] = '';\n }\n if (substr($rawMessage, -1) !== \"\\n\") {\n $rawMessageBuffers[$i] = $rawMessage;\n continue;\n }\n\n $message = unserialize(base64_decode($rawMessage));\n if (!$message) {\n kill_children($workerProcs);\n $stuff = fread($workerSock, 65536);\n error(\"Could not decode message from worker $i: '$rawMessage$stuff'\");\n }\n\n switch ($message[\"type\"]) {\n case \"tests_finished\":\n $testsInProgress--;\n foreach ($activeConflicts as $key => $workerId) {\n if ($workerId === $i) {\n unset($activeConflicts[$key]);\n if (isset($waitingTests[$key])) {\n while ($test = array_pop($waitingTests[$key])) {\n $test_files[] = $test;\n }\n unset($waitingTests[$key]);\n }\n }\n }\n $junit->mergeResults($message[\"junit\"]);\n \/\/ no break\n case \"ready\":\n \/\/ Schedule sequential tests only once we are down to one worker.\n if (count($workerProcs) === 1 && $sequentialTests) {\n $test_files = array_merge($test_files, $sequentialTests);\n $sequentialTests = [];\n }\n \/\/ Batch multiple tests to reduce communication overhead.\n \/\/ - When valgrind is used, communication overhead is relatively small,\n \/\/ so just use a batch size of 1.\n \/\/ - If this is running a small enough number of tests,\n \/\/ reduce the batch size to give batches to more workers.\n $files = [];\n $maxBatchSize = $valgrind ? 1 : ($shuffle ? 4 : 32);\n $averageFilesPerWorker = max(1, (int) ceil($totalFileCount \/ count($workerProcs)));\n $batchSize = min($maxBatchSize, $averageFilesPerWorker);\n while (count($files) <= $batchSize && $file = array_pop($test_files)) {\n foreach ($fileConflictsWith[$file] as $conflictKey) {\n if (isset($activeConflicts[$conflictKey])) {\n $waitingTests[$conflictKey][] = $file;\n continue 2;\n }\n }\n $files[] = $file;\n }\n if ($files) {\n foreach ($files as $file) {\n foreach ($fileConflictsWith[$file] as $conflictKey) {\n $activeConflicts[$conflictKey] = $i;\n }\n }\n $testsInProgress++;\n send_message($workerSocks[$i], [\n \"type\" => \"run_tests\",\n \"test_files\" => $files,\n \"env\" => $env,\n \"redir_tested\" => $redir_tested\n ]);\n } else {\n proc_terminate($workerProcs[$i]);\n unset($workerProcs[$i], $workerSocks[$i]);\n goto escape;\n }\n break;\n case \"test_result\":\n list($name, $index, $result, $resultText) = [$message[\"name\"], $message[\"index\"], $message[\"result\"], $message[\"text\"]];\n foreach ($message[\"PHP_FAILED_TESTS\"] as $category => $tests) {\n $PHP_FAILED_TESTS[$category] = array_merge($PHP_FAILED_TESTS[$category], $tests);\n }\n $test_idx++;\n\n if ($show_progress) {\n clear_show_test();\n }\n\n echo $resultText;\n\n if ($show_progress) {\n show_test($test_idx, count($workerProcs) . 
\"\/$workers concurrent test workers running\");\n }\n\n if (!is_array($name) && $result != 'REDIR') {\n $test_results[$index] = $result;\n\n if ($failed_tests_file && ($result == 'XFAILED' || $result == 'XLEAKED' || $result == 'FAILED' || $result == 'WARNED' || $result == 'LEAKED')) {\n fwrite($failed_tests_file, \"$index\\n\");\n }\n if ($result_tests_file) {\n fwrite($result_tests_file, \"$result\\t$index\\n\");\n }\n }\n break;\n case \"error\":\n kill_children($workerProcs);\n error(\"Worker $i reported error: $message[msg]\");\n break;\n case \"php_error\":\n kill_children($workerProcs);\n $error_consts = [\n 'E_ERROR',\n 'E_WARNING',\n 'E_PARSE',\n 'E_NOTICE',\n 'E_CORE_ERROR',\n 'E_CORE_WARNING',\n 'E_COMPILE_ERROR',\n 'E_COMPILE_WARNING',\n 'E_USER_ERROR',\n 'E_USER_WARNING',\n 'E_USER_NOTICE',\n 'E_STRICT', \/\/ TODO Cleanup when removed from Zend Engine.\n 'E_RECOVERABLE_ERROR',\n 'E_DEPRECATED',\n 'E_USER_DEPRECATED'\n ];\n $error_consts = array_combine(array_map('constant', $error_consts), $error_consts);\n error(\"Worker $i reported unexpected {$error_consts[$message['errno']]}: $message[errstr] in $message[errfile] on line $message[errline]\");\n \/\/ no break\n default:\n kill_children($workerProcs);\n error(\"Unrecognised message type '$message[type]' from worker $i\");\n }\n }\n }\n }\n }\n\n if ($show_progress) {\n clear_show_test();\n }\n\n kill_children($workerProcs);\n\n if ($testsInProgress < 0) {\n error(\"$testsInProgress test batches \u201cin progress\u201d, which is less than zero. THIS SHOULD NOT HAPPEN.\");\n }\n}\n\nfunction send_message($stream, array $message): void\n{\n $blocking = stream_get_meta_data($stream)[\"blocked\"];\n stream_set_blocking($stream, true);\n fwrite($stream, base64_encode(serialize($message)) . \"\\n\");\n stream_set_blocking($stream, $blocking);\n}\n\nfunction kill_children(array $children): void\n{\n foreach ($children as $child) {\n if ($child) {\n proc_terminate($child);\n }\n }\n}\n\nfunction run_worker(): void\n{\n global $workerID, $workerSock;\n\n global $junit;\n\n $sockUri = getenv(\"TEST_PHP_URI\");\n\n $workerSock = stream_socket_client($sockUri, $_, $_, 5) or error(\"Couldn't connect to $sockUri\");\n\n $greeting = fgets($workerSock);\n $greeting = unserialize(base64_decode($greeting)) or die(\"Could not decode greeting\\n\");\n if ($greeting[\"type\"] !== \"hello\") {\n error(\"Unexpected greeting of type $greeting[type]\");\n }\n\n set_error_handler(function (int $errno, string $errstr, string $errfile, int $errline) use ($workerSock): bool {\n if (error_reporting() & $errno) {\n send_message($workerSock, compact('errno', 'errstr', 'errfile', 'errline') + [\n 'type' => 'php_error'\n ]);\n }\n\n return true;\n });\n\n foreach ($greeting[\"GLOBALS\"] as $var => $value) {\n if ($var !== \"workerID\" && $var !== \"workerSock\" && $var !== \"GLOBALS\") {\n $GLOBALS[$var] = $value;\n }\n }\n foreach ($greeting[\"constants\"] as $const => $value) {\n define($const, $value);\n }\n\n send_message($workerSock, [\n \"type\" => \"hello_reply\",\n \"workerID\" => $workerID\n ]);\n\n send_message($workerSock, [\n \"type\" => \"ready\"\n ]);\n\n while (($command = fgets($workerSock))) {\n $command = unserialize(base64_decode($command));\n\n switch ($command[\"type\"]) {\n case \"run_tests\":\n run_all_tests($command[\"test_files\"], $command[\"env\"], $command[\"redir_tested\"]);\n send_message($workerSock, [\n \"type\" => \"tests_finished\",\n \"junit\" => $junit->isEnabled() ? 
$junit : null,\n ]);\n $junit->clear();\n break;\n default:\n send_message($workerSock, [\n \"type\" => \"error\",\n \"msg\" => \"Unrecognised message type: $command[type]\"\n ]);\n break 2;\n }\n }\n}\n\n\/\/\n\/\/ Show file or result block\n\/\/\nfunction show_file_block(string $file, string $block, ?string $section = null): void\n{\n global $cfg;\n global $colorize;\n\n if ($cfg['show'][$file]) {\n if (is_null($section)) {\n $section = strtoupper($file);\n }\n if ($section === 'DIFF' && $colorize) {\n \/\/ '-' is Light Red for removal, '+' is Light Green for addition\n $block = preg_replace('\/^[0-9]+\\-\\s.*$\/m', \"\\e[1;31m\\\\0\\e[0m\", $block);\n $block = preg_replace('\/^[0-9]+\\+\\s.*$\/m', \"\\e[1;32m\\\\0\\e[0m\", $block);\n }\n\n echo \"\\n========\" . $section . \"========\\n\";\n echo rtrim($block);\n echo \"\\n========DONE========\\n\";\n }\n}\n\nfunction skip_test(string $tested, string $tested_file, string $shortname, string $reason) {\n global $junit;\n\n show_result('SKIP', $tested, $tested_file, \"reason: $reason\");\n $junit->initSuite($junit->getSuiteName($shortname));\n $junit->markTestAs('SKIP', $shortname, $tested, 0, $reason);\n return 'SKIPPED';\n}\n\n\/\/\n\/\/ Run an individual test case.\n\/\/\n\/**\n * @param string|array $file\n *\/\nfunction run_test(string $php, $file, array $env): string\n{\n global $log_format, $ini_overwrites, $PHP_FAILED_TESTS;\n global $pass_options, $DETAILED, $IN_REDIRECT, $test_cnt, $test_idx;\n global $valgrind, $temp_source, $temp_target, $cfg, $environment;\n global $no_clean;\n global $SHOW_ONLY_GROUPS;\n global $no_file_cache;\n global $slow_min_ms;\n global $preload, $file_cache;\n global $num_repeats;\n \/\/ Parallel testing\n global $workerID;\n global $show_progress;\n\n \/\/ Temporary\n \/** @var JUnit *\/\n global $junit;\n\n static $skipCache;\n if (!$skipCache) {\n $enableSkipCache = !($env['DISABLE_SKIP_CACHE'] ?? '0');\n $skipCache = new SkipCache($enableSkipCache, $cfg['keep']['skip']);\n }\n\n $temp_filenames = null;\n $org_file = $file;\n $orig_php = $php;\n\n $php_cgi = $env['TEST_PHP_CGI_EXECUTABLE'] ?? null;\n $phpdbg = $env['TEST_PHPDBG_EXECUTABLE'] ?? null;\n\n if (is_array($file)) {\n $file = $file[0];\n }\n\n if ($DETAILED) {\n echo \"\n=================\nTEST $file\n\";\n }\n\n $shortname = str_replace(TEST_PHP_SRCDIR . 
'\/', '', $file);\n $tested_file = $shortname;\n\n try {\n $test = new TestFile($file, (bool)$IN_REDIRECT);\n } catch (BorkageException $ex) {\n show_result(\"BORK\", $ex->getMessage(), $tested_file);\n $PHP_FAILED_TESTS['BORKED'][] = [\n 'name' => $file,\n 'test_name' => '',\n 'output' => '',\n 'diff' => '',\n 'info' => \"{$ex->getMessage()} [$file]\",\n ];\n\n $junit->markTestAs('BORK', $shortname, $tested_file, 0, $ex->getMessage());\n return 'BORKED';\n }\n\n $tested = $test->getName();\n\n if ($num_repeats > 1 && $test->hasSection('FILE_EXTERNAL')) {\n return skip_test($tested, $tested_file, $shortname, 'Test with FILE_EXTERNAL might not be repeatable');\n }\n\n if ($test->hasSection('CAPTURE_STDIO')) {\n $capture = $test->getSection('CAPTURE_STDIO');\n $captureStdIn = stripos($capture, 'STDIN') !== false;\n $captureStdOut = stripos($capture, 'STDOUT') !== false;\n $captureStdErr = stripos($capture, 'STDERR') !== false;\n } else {\n $captureStdIn = true;\n $captureStdOut = true;\n $captureStdErr = true;\n }\n if ($captureStdOut && $captureStdErr) {\n $cmdRedirect = ' 2>&1';\n } else {\n $cmdRedirect = '';\n }\n\n \/* For GET\/POST\/PUT tests, check if cgi sapi is available and if it is, use it. *\/\n if ($test->isCGI()) {\n if (!$php_cgi) {\n return skip_test($tested, $tested_file, $shortname, 'CGI not available');\n }\n $php = $php_cgi . ' -C ';\n $uses_cgi = true;\n if ($num_repeats > 1) {\n return skip_test($tested, $tested_file, $shortname, 'CGI does not support --repeat');\n }\n }\n\n \/* For phpdbg tests, check if phpdbg sapi is available and if it is, use it. *\/\n $extra_options = '';\n if ($test->hasSection('PHPDBG')) {\n if (isset($phpdbg)) {\n $php = $phpdbg . ' -qIb';\n\n \/\/ Additional phpdbg command line options for sections that need to\n \/\/ be run straight away. For example, EXTENSIONS, SKIPIF, CLEAN.\n $extra_options = '-rr';\n } else {\n return skip_test($tested, $tested_file, $shortname, 'phpdbg not available');\n }\n if ($num_repeats > 1) {\n return skip_test($tested, $tested_file, $shortname, 'phpdbg does not support --repeat');\n }\n }\n\n if ($num_repeats > 1) {\n if ($test->hasSection('CLEAN')) {\n return skip_test($tested, $tested_file, $shortname, 'Test with CLEAN might not be repeatable');\n }\n if ($test->hasSection('STDIN')) {\n return skip_test($tested, $tested_file, $shortname, 'Test with STDIN might not be repeatable');\n }\n if ($test->hasSection('CAPTURE_STDIO')) {\n return skip_test($tested, $tested_file, $shortname, 'Test with CAPTURE_STDIO might not be repeatable');\n }\n }\n\n if ($show_progress && !$workerID) {\n show_test($test_idx, $shortname);\n }\n\n if (is_array($IN_REDIRECT)) {\n $temp_dir = $test_dir = $IN_REDIRECT['dir'];\n } else {\n $temp_dir = $test_dir = realpath(dirname($file));\n }\n\n if ($temp_source && $temp_target) {\n $temp_dir = str_replace($temp_source, $temp_target, $temp_dir);\n }\n\n $main_file_name = basename($file, 'phpt');\n\n $diff_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'diff';\n $log_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'log';\n $exp_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'exp';\n $output_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'out';\n $memcheck_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'mem';\n $sh_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'sh';\n $temp_file = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'php';\n $test_file = $test_dir . DIRECTORY_SEPARATOR . 
$main_file_name . 'php';\n $temp_skipif = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'skip.php';\n $test_skipif = $test_dir . DIRECTORY_SEPARATOR . $main_file_name . 'skip.php';\n $temp_clean = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'clean.php';\n $test_clean = $test_dir . DIRECTORY_SEPARATOR . $main_file_name . 'clean.php';\n $preload_filename = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'preload.php';\n $tmp_post = $temp_dir . DIRECTORY_SEPARATOR . $main_file_name . 'post';\n $tmp_relative_file = str_replace(__DIR__ . DIRECTORY_SEPARATOR, '', $test_file) . 't';\n\n if ($temp_source && $temp_target) {\n $temp_skipif .= 's';\n $temp_file .= 's';\n $temp_clean .= 's';\n $copy_file = $temp_dir . DIRECTORY_SEPARATOR . basename(is_array($file) ? $file[1] : $file) . '.phps';\n\n if (!is_dir(dirname($copy_file))) {\n mkdir(dirname($copy_file), 0777, true) or error(\"Cannot create output directory - \" . dirname($copy_file));\n }\n\n if ($test->hasSection('FILE')) {\n save_text($copy_file, $test->getSection('FILE'));\n }\n\n $temp_filenames = [\n 'file' => $copy_file,\n 'diff' => $diff_filename,\n 'log' => $log_filename,\n 'exp' => $exp_filename,\n 'out' => $output_filename,\n 'mem' => $memcheck_filename,\n 'sh' => $sh_filename,\n 'php' => $temp_file,\n 'skip' => $temp_skipif,\n 'clean' => $temp_clean\n ];\n }\n\n if (is_array($IN_REDIRECT)) {\n $tested = $IN_REDIRECT['prefix'] . ' ' . $tested;\n $tested_file = $tmp_relative_file;\n $shortname = str_replace(TEST_PHP_SRCDIR . '\/', '', $tested_file);\n }\n\n \/\/ unlink old test results\n @unlink($diff_filename);\n @unlink($log_filename);\n @unlink($exp_filename);\n @unlink($output_filename);\n @unlink($memcheck_filename);\n @unlink($sh_filename);\n @unlink($temp_file);\n @unlink($test_file);\n @unlink($temp_skipif);\n @unlink($test_skipif);\n @unlink($tmp_post);\n @unlink($temp_clean);\n @unlink($test_clean);\n @unlink($preload_filename);\n\n \/\/ Reset environment from any previous test.\n $env['REDIRECT_STATUS'] = '';\n $env['QUERY_STRING'] = '';\n $env['PATH_TRANSLATED'] = '';\n $env['SCRIPT_FILENAME'] = '';\n $env['REQUEST_METHOD'] = '';\n $env['CONTENT_TYPE'] = '';\n $env['CONTENT_LENGTH'] = '';\n $env['TZ'] = '';\n\n if ($test->sectionNotEmpty('ENV')) {\n $env_str = str_replace('{PWD}', dirname($file), $test->getSection('ENV'));\n foreach (explode(\"\\n\", $env_str) as $e) {\n $e = explode('=', trim($e), 2);\n\n if (!empty($e[0]) && isset($e[1])) {\n $env[$e[0]] = $e[1];\n }\n }\n }\n\n \/\/ Default ini settings\n $ini_settings = $workerID ? ['opcache.cache_id' => \"worker$workerID\"] : [];\n\n \/\/ Additional required extensions\n $extensions = [];\n if ($test->hasSection('EXTENSIONS')) {\n $extensions = preg_split(\"\/[\\n\\r]+\/\", trim($test->getSection('EXTENSIONS')));\n }\n if (is_array($IN_REDIRECT) && $IN_REDIRECT['EXTENSIONS'] != []) {\n $extensions = array_merge($extensions, $IN_REDIRECT['EXTENSIONS']);\n }\n\n \/* Load required extensions *\/\n if ($extensions != []) {\n $ext_params = [];\n settings2array($ini_overwrites, $ext_params);\n $ext_params = settings2params($ext_params);\n [$ext_dir, $loaded] = $skipCache->getExtensions(\"$orig_php $pass_options $extra_options $ext_params $no_file_cache\");\n $ext_prefix = IS_WINDOWS ? \"php_\" : \"\";\n $missing = [];\n foreach ($extensions as $req_ext) {\n if (!in_array(strtolower($req_ext), $loaded)) {\n if ($req_ext == 'opcache' || $req_ext == 'xdebug') {\n $ext_file = $ext_dir . DIRECTORY_SEPARATOR . $ext_prefix . $req_ext . '.' . 
PHP_SHLIB_SUFFIX;\n $ini_settings['zend_extension'][] = $ext_file;\n } else {\n $ext_file = $ext_dir . DIRECTORY_SEPARATOR . $ext_prefix . $req_ext . '.' . PHP_SHLIB_SUFFIX;\n $ini_settings['extension'][] = $ext_file;\n }\n if (!is_readable($ext_file)) {\n $missing[] = $req_ext;\n }\n }\n }\n if ($missing) {\n $message = 'Required extension' . (count($missing) > 1 ? 's' : '')\n . ' missing: ' . implode(', ', $missing);\n return skip_test($tested, $tested_file, $shortname, $message);\n }\n }\n\n \/\/ additional ini overwrites\n \/\/$ini_overwrites[] = 'setting=value';\n settings2array($ini_overwrites, $ini_settings);\n\n $orig_ini_settings = settings2params($ini_settings);\n\n if ($file_cache !== null) {\n $ini_settings['opcache.file_cache'] = '\/tmp';\n \/\/ Make sure warnings still show up on the second run.\n $ini_settings['opcache.record_warnings'] = '1';\n \/\/ File cache is currently incompatible with JIT.\n $ini_settings['opcache.jit'] = '0';\n if ($file_cache === 'use') {\n \/\/ Disable timestamp validation in order to fetch from file cache,\n \/\/ even though all the files are re-created.\n $ini_settings['opcache.validate_timestamps'] = '0';\n }\n } else if ($num_repeats > 1) {\n \/\/ Make sure warnings still show up on the second run.\n $ini_settings['opcache.record_warnings'] = '1';\n }\n if (extension_loaded('posix') && posix_getuid() === 0) {\n $ini_settings['opcache.preload_user'] = 'root';\n }\n\n \/\/ Any special ini settings\n \/\/ these may overwrite the test defaults...\n if ($test->hasSection('INI')) {\n $ini = str_replace('{PWD}', dirname($file), $test->getSection('INI'));\n $ini = str_replace('{TMP}', sys_get_temp_dir(), $ini);\n $replacement = IS_WINDOWS ? '\"' . PHP_BINARY . ' -r \\\"while ($in = fgets(STDIN)) echo $in;\\\" > $1\"' : 'tee $1 >\/dev\/null';\n $ini = preg_replace('\/{MAIL:(\\S+)}\/', $replacement, $ini);\n settings2array(preg_split(\"\/[\\n\\r]+\/\", $ini), $ini_settings);\n\n if ($num_repeats > 1 && isset($ini_settings['opcache.opt_debug_level'])) {\n return skip_test($tested, $tested_file, $shortname, 'opt_debug_level tests are not repeatable');\n }\n }\n\n $ini_settings = settings2params($ini_settings);\n\n $env['TEST_PHP_EXTRA_ARGS'] = $pass_options . ' ' . $ini_settings;\n\n \/\/ Check if test should be skipped.\n $info = '';\n $warn = false;\n\n if ($test->sectionNotEmpty('SKIPIF')) {\n show_file_block('skip', $test->getSection('SKIPIF'));\n $extra = !IS_WINDOWS ?\n \"unset REQUEST_METHOD; unset QUERY_STRING; unset PATH_TRANSLATED; unset SCRIPT_FILENAME; unset REQUEST_METHOD;\" : \"\";\n\n if ($valgrind) {\n $env['USE_ZEND_ALLOC'] = '0';\n $env['ZEND_DONT_UNLOAD_MODULES'] = 1;\n }\n\n $junit->startTimer($shortname);\n\n $startTime = microtime(true);\n $commandLine = \"$extra $php $pass_options $extra_options -q $orig_ini_settings $no_file_cache -d display_errors=1 -d display_startup_errors=0\";\n $output = $skipCache->checkSkip($commandLine, $test->getSection('SKIPIF'), $test_skipif, $temp_skipif, $env);\n\n $time = microtime(true) - $startTime;\n $junit->stopTimer($shortname);\n\n if ($time > $slow_min_ms \/ 1000) {\n $PHP_FAILED_TESTS['SLOW'][] = [\n 'name' => $file,\n 'test_name' => 'SKIPIF of ' . $tested . 
\" [$tested_file]\",\n 'output' => '',\n 'diff' => '',\n 'info' => $time,\n ];\n }\n\n if (!$cfg['keep']['skip']) {\n @unlink($test_skipif);\n }\n\n if (!strncasecmp('skip', $output, 4)) {\n if (preg_match('\/^skip\\s*(.+)\/i', $output, $m)) {\n show_result('SKIP', $tested, $tested_file, \"reason: $m[1]\", $temp_filenames);\n } else {\n show_result('SKIP', $tested, $tested_file, '', $temp_filenames);\n }\n\n $message = !empty($m[1]) ? $m[1] : '';\n $junit->markTestAs('SKIP', $shortname, $tested, null, $message);\n return 'SKIPPED';\n }\n\n\n if (!strncasecmp('info', $output, 4) && preg_match('\/^info\\s*(.+)\/i', $output, $m)) {\n $info = \" (info: $m[1])\";\n } elseif (!strncasecmp('warn', $output, 4) && preg_match('\/^warn\\s+(.+)\/i', $output, $m)) {\n $warn = true; \/* only if there is a reason *\/\n $info = \" (warn: $m[1])\";\n } elseif (!strncasecmp('xfail', $output, 5)) {\n \/\/ Pretend we have an XFAIL section\n $test->setSection('XFAIL', ltrim(substr($output, 5)));\n } elseif ($output !== '') {\n show_result(\"BORK\", $output, $tested_file, 'reason: invalid output from SKIPIF', $temp_filenames);\n $PHP_FAILED_TESTS['BORKED'][] = [\n 'name' => $file,\n 'test_name' => '',\n 'output' => '',\n 'diff' => '',\n 'info' => \"$output [$file]\",\n ];\n\n $junit->markTestAs('BORK', $shortname, $tested, null, $output);\n return 'BORKED';\n }\n }\n\n if (!extension_loaded(\"zlib\") && $test->hasAnySections(\"GZIP_POST\", \"DEFLATE_POST\")) {\n $message = \"ext\/zlib required\";\n show_result('SKIP', $tested, $tested_file, \"reason: $message\", $temp_filenames);\n $junit->markTestAs('SKIP', $shortname, $tested, null, $message);\n return 'SKIPPED';\n }\n\n if ($test->hasSection('REDIRECTTEST')) {\n $test_files = [];\n\n $IN_REDIRECT = eval($test->getSection('REDIRECTTEST'));\n $IN_REDIRECT['via'] = \"via [$shortname]\\n\\t\";\n $IN_REDIRECT['dir'] = realpath(dirname($file));\n $IN_REDIRECT['prefix'] = $tested;\n $IN_REDIRECT['EXTENSIONS'] = $extensions;\n\n if (!empty($IN_REDIRECT['TESTS'])) {\n if (is_array($org_file)) {\n $test_files[] = $org_file[1];\n } else {\n $GLOBALS['test_files'] = $test_files;\n find_files($IN_REDIRECT['TESTS']);\n\n foreach ($GLOBALS['test_files'] as $f) {\n $test_files[] = [$f, $file];\n }\n }\n $test_cnt += count($test_files) - 1;\n $test_idx--;\n\n show_redirect_start($IN_REDIRECT['TESTS'], $tested, $tested_file);\n\n \/\/ set up environment\n $redirenv = array_merge($environment, $IN_REDIRECT['ENV']);\n $redirenv['REDIR_TEST_DIR'] = realpath($IN_REDIRECT['TESTS']) . 
DIRECTORY_SEPARATOR;\n\n usort($test_files, \"test_sort\");\n run_all_tests($test_files, $redirenv, $tested);\n\n show_redirect_ends($IN_REDIRECT['TESTS'], $tested, $tested_file);\n\n \/\/ a redirected test never fails\n $IN_REDIRECT = false;\n\n $junit->markTestAs('PASS', $shortname, $tested);\n return 'REDIR';\n }\n\n $bork_info = \"Redirect info must contain exactly one TEST string to be used as redirect directory.\";\n show_result(\"BORK\", $bork_info, '', '', $temp_filenames);\n $PHP_FAILED_TESTS['BORKED'][] = [\n 'name' => $file,\n 'test_name' => '',\n 'output' => '',\n 'diff' => '',\n 'info' => \"$bork_info [$file]\",\n ];\n }\n\n if (is_array($org_file) || $test->hasSection('REDIRECTTEST')) {\n if (is_array($org_file)) {\n $file = $org_file[0];\n }\n\n $bork_info = \"Redirected test did not contain redirection info\";\n show_result(\"BORK\", $bork_info, '', '', $temp_filenames);\n $PHP_FAILED_TESTS['BORKED'][] = [\n 'name' => $file,\n 'test_name' => '',\n 'output' => '',\n 'diff' => '',\n 'info' => \"$bork_info [$file]\",\n ];\n\n $junit->markTestAs('BORK', $shortname, $tested, null, $bork_info);\n\n return 'BORKED';\n }\n\n \/\/ We've satisfied the preconditions - run the test!\n if ($test->hasSection('FILE')) {\n show_file_block('php', $test->getSection('FILE'), 'TEST');\n save_text($test_file, $test->getSection('FILE'), $temp_file);\n } else {\n $test_file = $temp_file = \"\";\n }\n\n if ($test->hasSection('GET')) {\n $query_string = trim($test->getSection('GET'));\n } else {\n $query_string = '';\n }\n\n $env['REDIRECT_STATUS'] = '1';\n if (empty($env['QUERY_STRING'])) {\n $env['QUERY_STRING'] = $query_string;\n }\n if (empty($env['PATH_TRANSLATED'])) {\n $env['PATH_TRANSLATED'] = $test_file;\n }\n if (empty($env['SCRIPT_FILENAME'])) {\n $env['SCRIPT_FILENAME'] = $test_file;\n }\n\n if ($test->hasSection('COOKIE')) {\n $env['HTTP_COOKIE'] = trim($test->getSection('COOKIE'));\n } else {\n $env['HTTP_COOKIE'] = '';\n }\n\n $args = $test->hasSection('ARGS') ? ' -- ' . $test->getSection('ARGS') : '';\n\n if ($preload && !empty($test_file)) {\n save_text($preload_filename, \"<?php opcache_compile_file('$test_file');\");\n $local_pass_options = $pass_options;\n unset($pass_options);\n $pass_options = $local_pass_options;\n $pass_options .= \" -d opcache.preload=\" . 
$preload_filename;\n }\n\n if ($test->sectionNotEmpty('POST_RAW')) {\n $post = trim($test->getSection('POST_RAW'));\n $raw_lines = explode(\"\\n\", $post);\n\n $request = '';\n $started = false;\n\n foreach ($raw_lines as $line) {\n if (empty($env['CONTENT_TYPE']) && preg_match('\/^Content-Type:(.*)\/i', $line, $res)) {\n $env['CONTENT_TYPE'] = trim(str_replace(\"\\r\", '', $res[1]));\n continue;\n }\n\n if ($started) {\n $request .= \"\\n\";\n }\n\n $started = true;\n $request .= $line;\n }\n\n $env['CONTENT_LENGTH'] = strlen($request);\n $env['REQUEST_METHOD'] = 'POST';\n\n if (empty($request)) {\n $junit->markTestAs('BORK', $shortname, $tested, null, 'empty $request');\n return 'BORKED';\n }\n\n save_text($tmp_post, $request);\n $cmd = \"$php $pass_options $ini_settings -f \\\"$test_file\\\"$cmdRedirect < \\\"$tmp_post\\\"\";\n } elseif ($test->sectionNotEmpty('PUT')) {\n $post = trim($test->getSection('PUT'));\n $raw_lines = explode(\"\\n\", $post);\n\n $request = '';\n $started = false;\n\n foreach ($raw_lines as $line) {\n if (empty($env['CONTENT_TYPE']) && preg_match('\/^Content-Type:(.*)\/i', $line, $res)) {\n $env['CONTENT_TYPE'] = trim(str_replace(\"\\r\", '', $res[1]));\n continue;\n }\n\n if ($started) {\n $request .= \"\\n\";\n }\n\n $started = true;\n $request .= $line;\n }\n\n $env['CONTENT_LENGTH'] = strlen($request);\n $env['REQUEST_METHOD'] = 'PUT';\n\n if (empty($request)) {\n $junit->markTestAs('BORK', $shortname, $tested, null, 'empty $request');\n return 'BORKED';\n }\n\n save_text($tmp_post, $request);\n $cmd = \"$php $pass_options $ini_settings -f \\\"$test_file\\\"$cmdRedirect < \\\"$tmp_post\\\"\";\n } elseif ($test->sectionNotEmpty('POST')) {\n $post = trim($test->getSection('POST'));\n $content_length = strlen($post);\n save_text($tmp_post, $post);\n\n $env['REQUEST_METHOD'] = 'POST';\n if (empty($env['CONTENT_TYPE'])) {\n $env['CONTENT_TYPE'] = 'application\/x-www-form-urlencoded';\n }\n\n if (empty($env['CONTENT_LENGTH'])) {\n $env['CONTENT_LENGTH'] = $content_length;\n }\n\n $cmd = \"$php $pass_options $ini_settings -f \\\"$test_file\\\"$cmdRedirect < \\\"$tmp_post\\\"\";\n } elseif ($test->sectionNotEmpty('GZIP_POST')) {\n $post = trim($test->getSection('GZIP_POST'));\n $post = gzencode($post, 9, FORCE_GZIP);\n $env['HTTP_CONTENT_ENCODING'] = 'gzip';\n\n save_text($tmp_post, $post);\n $content_length = strlen($post);\n\n $env['REQUEST_METHOD'] = 'POST';\n $env['CONTENT_TYPE'] = 'application\/x-www-form-urlencoded';\n $env['CONTENT_LENGTH'] = $content_length;\n\n $cmd = \"$php $pass_options $ini_settings -f \\\"$test_file\\\"$cmdRedirect < \\\"$tmp_post\\\"\";\n } elseif ($test->sectionNotEmpty('DEFLATE_POST')) {\n $post = trim($test->getSection('DEFLATE_POST'));\n $post = gzcompress($post, 9);\n $env['HTTP_CONTENT_ENCODING'] = 'deflate';\n save_text($tmp_post, $post);\n $content_length = strlen($post);\n\n $env['REQUEST_METHOD'] = 'POST';\n $env['CONTENT_TYPE'] = 'application\/x-www-form-urlencoded';\n $env['CONTENT_LENGTH'] = $content_length;\n\n $cmd = \"$php $pass_options $ini_settings -f \\\"$test_file\\\"$cmdRedirect < \\\"$tmp_post\\\"\";\n } else {\n $env['REQUEST_METHOD'] = 'GET';\n $env['CONTENT_TYPE'] = '';\n $env['CONTENT_LENGTH'] = '';\n\n $repeat_option = $num_repeats > 1 ? 
\"--repeat $num_repeats\" : \"\";\n $cmd = \"$php $pass_options $repeat_option $ini_settings -f \\\"$test_file\\\" $args$cmdRedirect\";\n }\n\n $orig_cmd = $cmd;\n if ($valgrind) {\n $env['USE_ZEND_ALLOC'] = '0';\n $env['ZEND_DONT_UNLOAD_MODULES'] = 1;\n\n $cmd = $valgrind->wrapCommand($cmd, $memcheck_filename, strpos($test_file, \"pcre\") !== false);\n }\n\n if ($DETAILED) {\n echo \"\nCONTENT_LENGTH = \" . $env['CONTENT_LENGTH'] . \"\nCONTENT_TYPE = \" . $env['CONTENT_TYPE'] . \"\nPATH_TRANSLATED = \" . $env['PATH_TRANSLATED'] . \"\nQUERY_STRING = \" . $env['QUERY_STRING'] . \"\nREDIRECT_STATUS = \" . $env['REDIRECT_STATUS'] . \"\nREQUEST_METHOD = \" . $env['REQUEST_METHOD'] . \"\nSCRIPT_FILENAME = \" . $env['SCRIPT_FILENAME'] . \"\nHTTP_COOKIE = \" . $env['HTTP_COOKIE'] . \"\nCOMMAND $cmd\n\";\n }\n\n $junit->startTimer($shortname);\n $hrtime = hrtime();\n $startTime = $hrtime[0] * 1000000000 + $hrtime[1];\n\n $stdin = $test->hasSection('STDIN') ? $test->getSection('STDIN') : null;\n $out = system_with_timeout($cmd, $env, $stdin, $captureStdIn, $captureStdOut, $captureStdErr);\n\n $junit->stopTimer($shortname);\n $hrtime = hrtime();\n $time = $hrtime[0] * 1000000000 + $hrtime[1] - $startTime;\n if ($time >= $slow_min_ms * 1000000) {\n $PHP_FAILED_TESTS['SLOW'][] = [\n 'name' => $file,\n 'test_name' => $tested . \" [$tested_file]\",\n 'output' => '',\n 'diff' => '',\n 'info' => $time \/ 1000000000,\n ];\n }\n\n \/\/ Remember CLEAN output to report borked test if it otherwise passes.\n $clean_output = null;\n if ((!$no_clean || $cfg['keep']['clean']) && $test->sectionNotEmpty('CLEAN')) {\n show_file_block('clean', $test->getSection('CLEAN'));\n save_text($test_clean, trim($test->getSection('CLEAN')), $temp_clean);\n\n if (!$no_clean) {\n $extra = !IS_WINDOWS ?\n \"unset REQUEST_METHOD; unset QUERY_STRING; unset PATH_TRANSLATED; unset SCRIPT_FILENAME; unset REQUEST_METHOD;\" : \"\";\n $clean_output = system_with_timeout(\"$extra $orig_php $pass_options -q $orig_ini_settings $no_file_cache \\\"$test_clean\\\"\", $env);\n }\n\n if (!$cfg['keep']['clean']) {\n @unlink($test_clean);\n }\n }\n\n $leaked = false;\n $passed = false;\n\n if ($valgrind) { \/\/ leak check\n $leaked = filesize($memcheck_filename) > 0;\n\n if (!$leaked) {\n @unlink($memcheck_filename);\n }\n }\n\n if ($num_repeats > 1) {\n \/\/ In repeat mode, retain the output before the first execution,\n \/\/ and of the last execution. Do this early, because the trimming below\n \/\/ makes the newline handling complicated.\n $separator1 = \"Executing for the first time...\\n\";\n $separator1_pos = strpos($out, $separator1);\n if ($separator1_pos !== false) {\n $separator2 = \"Finished execution, repeating...\\n\";\n $separator2_pos = strrpos($out, $separator2);\n if ($separator2_pos !== false) {\n $out = substr($out, 0, $separator1_pos)\n . substr($out, $separator2_pos + strlen($separator2));\n } else {\n $out = substr($out, 0, $separator1_pos)\n . 
substr($out, $separator1_pos + strlen($separator1));\n }\n }\n }\n\n \/\/ Does the output match what is expected?\n $output = preg_replace(\"\/\\r\\n\/\", \"\\n\", trim($out));\n\n \/* when using CGI, strip the headers from the output *\/\n $headers = [];\n\n if (!empty($uses_cgi) && preg_match(\"\/^(.*?)\\r?\\n\\r?\\n(.*)\/s\", $out, $match)) {\n $output = trim($match[2]);\n $rh = preg_split(\"\/[\\n\\r]+\/\", $match[1]);\n\n foreach ($rh as $line) {\n if (strpos($line, ':') !== false) {\n $line = explode(':', $line, 2);\n $headers[trim($line[0])] = trim($line[1]);\n }\n }\n }\n\n $wanted_headers = null;\n $output_headers = null;\n $failed_headers = false;\n\n if ($test->hasSection('EXPECTHEADERS')) {\n $want = [];\n $wanted_headers = [];\n $lines = preg_split(\"\/[\\n\\r]+\/\", $test->getSection('EXPECTHEADERS'));\n\n foreach ($lines as $line) {\n if (strpos($line, ':') !== false) {\n $line = explode(':', $line, 2);\n $want[trim($line[0])] = trim($line[1]);\n $wanted_headers[] = trim($line[0]) . ': ' . trim($line[1]);\n }\n }\n\n $output_headers = [];\n\n foreach ($want as $k => $v) {\n if (isset($headers[$k])) {\n $output_headers[] = $k . ': ' . $headers[$k];\n }\n\n if (!isset($headers[$k]) || $headers[$k] != $v) {\n $failed_headers = true;\n }\n }\n\n ksort($wanted_headers);\n $wanted_headers = implode(\"\\n\", $wanted_headers);\n ksort($output_headers);\n $output_headers = implode(\"\\n\", $output_headers);\n }\n\n show_file_block('out', $output);\n\n if ($preload) {\n $output = trim(preg_replace(\"\/\\n?Warning: Can't preload [^\\n]*\\n?\/\", \"\", $output));\n }\n\n if ($test->hasAnySections('EXPECTF', 'EXPECTREGEX')) {\n if ($test->hasSection('EXPECTF')) {\n $wanted = trim($test->getSection('EXPECTF'));\n } else {\n $wanted = trim($test->getSection('EXPECTREGEX'));\n }\n\n show_file_block('exp', $wanted);\n $wanted_re = preg_replace('\/\\r\\n\/', \"\\n\", $wanted);\n\n if ($test->hasSection('EXPECTF')) {\n \/\/ do preg_quote, but miss out any %r delimited sections\n $temp = \"\";\n $r = \"%r\";\n $startOffset = 0;\n $length = strlen($wanted_re);\n while ($startOffset < $length) {\n $start = strpos($wanted_re, $r, $startOffset);\n if ($start !== false) {\n \/\/ we have found a start tag\n $end = strpos($wanted_re, $r, $start + 2);\n if ($end === false) {\n \/\/ unbalanced tag, ignore it.\n $end = $start = $length;\n }\n } else {\n \/\/ no more %r sections\n $start = $end = $length;\n }\n \/\/ quote a non re portion of the string\n $temp .= preg_quote(substr($wanted_re, $startOffset, $start - $startOffset), '\/');\n \/\/ add the re unquoted.\n if ($end > $start) {\n $temp .= '(' . substr($wanted_re, $start + 2, $end - $start - 2) . ')';\n }\n $startOffset = $end + 2;\n }\n $wanted_re = $temp;\n\n \/\/ Stick to basics\n $wanted_re = strtr($wanted_re, [\n '%e' => preg_quote(DIRECTORY_SEPARATOR, '\/'),\n '%s' => '[^\\r\\n]+',\n '%S' => '[^\\r\\n]*',\n '%a' => '.+',\n '%A' => '.*',\n '%w' => '\\s*',\n '%i' => '[+-]?\\d+',\n '%d' => '\\d+',\n '%x' => '[0-9a-fA-F]+',\n '%f' => '[+-]?(?:\\d+|(?=\\.\\d))(?:\\.\\d+)?(?:[Ee][+-]?\\d+)?',\n '%c' => '.',\n '%0' => '\\x00',\n ]);\n }\n\n if (preg_match('\/^' . $wanted_re . 
'$\/s', $output)) {\n $passed = true;\n }\n } else {\n $wanted = trim($test->getSection('EXPECT'));\n $wanted = preg_replace('\/\\r\\n\/', \"\\n\", $wanted);\n show_file_block('exp', $wanted);\n\n \/\/ compare and leave on success\n if (!strcmp($output, $wanted)) {\n $passed = true;\n }\n\n $wanted_re = null;\n }\n\n if ($passed) {\n if (!$cfg['keep']['php'] && !$leaked) {\n @unlink($test_file);\n @unlink($preload_filename);\n }\n @unlink($tmp_post);\n\n if (!$leaked && !$failed_headers) {\n \/\/ If the test passed and CLEAN produced output, report test as borked.\n if ($clean_output) {\n show_result(\"BORK\", $output, $tested_file, 'reason: invalid output from CLEAN', $temp_filenames);\n $PHP_FAILED_TESTS['BORKED'][] = [\n 'name' => $file,\n 'test_name' => '',\n 'output' => '',\n 'diff' => '',\n 'info' => \"$clean_output [$file]\",\n ];\n\n $junit->markTestAs('BORK', $shortname, $tested, null, $clean_output);\n return 'BORKED';\n }\n\n if ($test->hasSection('XFAIL')) {\n $warn = true;\n $info = \" (warn: XFAIL section but test passes)\";\n } elseif ($test->hasSection('XLEAK')) {\n $warn = true;\n $info = \" (warn: XLEAK section but test passes)\";\n } else {\n show_result(\"PASS\", $tested, $tested_file, '', $temp_filenames);\n $junit->markTestAs('PASS', $shortname, $tested);\n return 'PASSED';\n }\n }\n }\n\n \/\/ Test failed so we need to report details.\n if ($failed_headers) {\n $passed = false;\n $wanted = $wanted_headers . \"\\n--HEADERS--\\n\" . $wanted;\n $output = $output_headers . \"\\n--HEADERS--\\n\" . $output;\n\n if (isset($wanted_re)) {\n $wanted_re = preg_quote($wanted_headers . \"\\n--HEADERS--\\n\", '\/') . $wanted_re;\n }\n }\n\n $restype = [];\n\n if ($leaked) {\n $restype[] = $test->hasSection('XLEAK') ?\n 'XLEAK' : 'LEAK';\n }\n\n if ($warn) {\n $restype[] = 'WARN';\n }\n\n if (!$passed) {\n if ($test->hasSection('XFAIL')) {\n $restype[] = 'XFAIL';\n $info = ' XFAIL REASON: ' . rtrim($test->getSection('XFAIL'));\n } elseif ($test->hasSection('XLEAK')) {\n $restype[] = 'XLEAK';\n $info = ' XLEAK REASON: ' . rtrim($test->getSection('XLEAK'));\n } else {\n $restype[] = 'FAIL';\n }\n }\n\n if (!$passed) {\n \/\/ write .exp\n if (strpos($log_format, 'E') !== false && file_put_contents($exp_filename, $wanted) === false) {\n error(\"Cannot create expected test output - $exp_filename\");\n }\n\n \/\/ write .out\n if (strpos($log_format, 'O') !== false && file_put_contents($output_filename, $output) === false) {\n error(\"Cannot create test output - $output_filename\");\n }\n\n \/\/ write .diff\n if (!empty($environment['TEST_PHP_DIFF_CMD'])) {\n $diff = generate_diff_external($environment['TEST_PHP_DIFF_CMD'], $exp_filename, $output_filename);\n } else {\n $diff = generate_diff($wanted, $wanted_re, $output);\n }\n\n if (is_array($IN_REDIRECT)) {\n $orig_shortname = str_replace(TEST_PHP_SRCDIR . '\/', '', $file);\n $diff = \"# original source file: $orig_shortname\\n\" . 
$diff;\n }\n if (!$SHOW_ONLY_GROUPS || array_intersect($restype, $SHOW_ONLY_GROUPS)) {\n show_file_block('diff', $diff);\n }\n if (strpos($log_format, 'D') !== false && file_put_contents($diff_filename, $diff) === false) {\n error(\"Cannot create test diff - $diff_filename\");\n }\n\n \/\/ write .log\n if (strpos($log_format, 'L') !== false && file_put_contents($log_filename, \"\n---- EXPECTED OUTPUT\n$wanted\n---- ACTUAL OUTPUT\n$output\n---- FAILED\n\") === false) {\n error(\"Cannot create test log - $log_filename\");\n error_report($file, $log_filename, $tested);\n }\n }\n\n if (!$passed || $leaked) {\n \/\/ write .sh\n if (strpos($log_format, 'S') !== false) {\n $env_lines = [];\n foreach ($env as $env_var => $env_val) {\n $env_lines[] = \"export $env_var=\" . escapeshellarg($env_val ?? \"\");\n }\n $exported_environment = $env_lines ? \"\\n\" . implode(\"\\n\", $env_lines) . \"\\n\" : \"\";\n $sh_script = <<<SH\n#!\/bin\/sh\n{$exported_environment}\ncase \"$1\" in\n\"gdb\")\n gdb --args {$orig_cmd}\n ;;\n\"lldb\")\n lldb -- {$orig_cmd}\n ;;\n\"valgrind\")\n USE_ZEND_ALLOC=0 valgrind $2 {$orig_cmd}\n ;;\n\"rr\")\n rr record $2 {$orig_cmd}\n ;;\n*)\n {$orig_cmd}\n ;;\nesac\nSH;\n if (file_put_contents($sh_filename, $sh_script) === false) {\n error(\"Cannot create test shell script - $sh_filename\");\n }\n chmod($sh_filename, 0755);\n }\n }\n\n if ($valgrind && $leaked && $cfg[\"show\"][\"mem\"]) {\n show_file_block('mem', file_get_contents($memcheck_filename));\n }\n\n show_result(implode('&', $restype), $tested, $tested_file, $info, $temp_filenames);\n\n foreach ($restype as $type) {\n $PHP_FAILED_TESTS[$type . 'ED'][] = [\n 'name' => $file,\n 'test_name' => (is_array($IN_REDIRECT) ? $IN_REDIRECT['via'] : '') . $tested . \" [$tested_file]\",\n 'output' => $output_filename,\n 'diff' => $diff_filename,\n 'info' => $info,\n ];\n }\n\n $diff = empty($diff) ? '' : preg_replace('\/\\e\/', '<esc>', $diff);\n\n $junit->markTestAs($restype, $shortname, $tested, null, $info, $diff);\n\n return $restype[0] . 'ED';\n}\n\n\/**\n * @return bool|int\n *\/\nfunction comp_line(string $l1, string $l2, bool $is_reg)\n{\n if ($is_reg) {\n return preg_match('\/^' . $l1 . '$\/s', $l2);\n }\n\n return !strcmp($l1, $l2);\n}\n\nfunction count_array_diff(\n array $ar1,\n array $ar2,\n bool $is_reg,\n array $w,\n int $idx1,\n int $idx2,\n int $cnt1,\n int $cnt2,\n int $steps\n): int {\n $equal = 0;\n\n while ($idx1 < $cnt1 && $idx2 < $cnt2 && comp_line($ar1[$idx1], $ar2[$idx2], $is_reg)) {\n $idx1++;\n $idx2++;\n $equal++;\n $steps--;\n }\n if (--$steps > 0) {\n $eq1 = 0;\n $st = $steps \/ 2;\n\n for ($ofs1 = $idx1 + 1; $ofs1 < $cnt1 && $st-- > 0; $ofs1++) {\n $eq = @count_array_diff($ar1, $ar2, $is_reg, $w, $ofs1, $idx2, $cnt1, $cnt2, $st);\n\n if ($eq > $eq1) {\n $eq1 = $eq;\n }\n }\n\n $eq2 = 0;\n $st = $steps;\n\n for ($ofs2 = $idx2 + 1; $ofs2 < $cnt2 && $st-- > 0; $ofs2++) {\n $eq = @count_array_diff($ar1, $ar2, $is_reg, $w, $idx1, $ofs2, $cnt1, $cnt2, $st);\n if ($eq > $eq2) {\n $eq2 = $eq;\n }\n }\n\n if ($eq1 > $eq2) {\n $equal += $eq1;\n } elseif ($eq2 > 0) {\n $equal += $eq2;\n }\n }\n\n return $equal;\n}\n\nfunction generate_array_diff(array $ar1, array $ar2, bool $is_reg, array $w): array\n{\n global $context_line_count;\n $idx1 = 0;\n $cnt1 = @count($ar1);\n $idx2 = 0;\n $cnt2 = @count($ar2);\n $diff = [];\n $old1 = [];\n $old2 = [];\n $number_len = max(3, strlen((string)max($cnt1 + 1, $cnt2 + 1)));\n $line_number_spec = '%0' . $number_len . 
'd';\n\n \/** Mapping from $idx2 to $idx1, including indexes of idx2 that are identical to idx1 as well as entries that don't have matches *\/\n $mapping = [];\n\n while ($idx1 < $cnt1 && $idx2 < $cnt2) {\n $mapping[$idx2] = $idx1;\n if (comp_line($ar1[$idx1], $ar2[$idx2], $is_reg)) {\n $idx1++;\n $idx2++;\n continue;\n }\n\n $c1 = @count_array_diff($ar1, $ar2, $is_reg, $w, $idx1 + 1, $idx2, $cnt1, $cnt2, 10);\n $c2 = @count_array_diff($ar1, $ar2, $is_reg, $w, $idx1, $idx2 + 1, $cnt1, $cnt2, 10);\n\n if ($c1 > $c2) {\n $old1[$idx1] = sprintf(\"{$line_number_spec}- \", $idx1 + 1) . $w[$idx1++];\n } elseif ($c2 > 0) {\n $old2[$idx2] = sprintf(\"{$line_number_spec}+ \", $idx2 + 1) . $ar2[$idx2++];\n } else {\n $old1[$idx1] = sprintf(\"{$line_number_spec}- \", $idx1 + 1) . $w[$idx1++];\n $old2[$idx2] = sprintf(\"{$line_number_spec}+ \", $idx2 + 1) . $ar2[$idx2++];\n }\n $last_printed_context_line = $idx1;\n }\n $mapping[$idx2] = $idx1;\n\n reset($old1);\n $k1 = key($old1);\n $l1 = -2;\n reset($old2);\n $k2 = key($old2);\n $l2 = -2;\n $old_k1 = -1;\n $add_context_lines = function (int $new_k1) use (&$old_k1, &$diff, $w, $context_line_count, $number_len) {\n if ($old_k1 >= $new_k1 || !$context_line_count) {\n return;\n }\n $end = $new_k1 - 1;\n $range_end = min($end, $old_k1 + $context_line_count);\n if ($old_k1 >= 0) {\n while ($old_k1 < $range_end) {\n $diff[] = str_repeat(' ', $number_len + 2) . $w[$old_k1++];\n }\n }\n if ($end - $context_line_count > $old_k1) {\n $old_k1 = $end - $context_line_count;\n if ($old_k1 > 0) {\n \/\/ Add a '--' to mark sections where the common areas were truncated\n $diff[] = '--';\n }\n }\n $old_k1 = max($old_k1, 0);\n while ($old_k1 < $end) {\n $diff[] = str_repeat(' ', $number_len + 2) . $w[$old_k1++];\n }\n $old_k1 = $new_k1;\n };\n\n while ($k1 !== null || $k2 !== null) {\n if ($k1 == $l1 + 1 || $k2 === null) {\n $add_context_lines($k1);\n $l1 = $k1;\n $diff[] = current($old1);\n $old_k1 = $k1;\n $k1 = next($old1) ? key($old1) : null;\n } elseif ($k2 == $l2 + 1 || $k1 === null) {\n $add_context_lines($mapping[$k2]);\n $l2 = $k2;\n $diff[] = current($old2);\n $k2 = next($old2) ? key($old2) : null;\n } elseif ($k1 < $mapping[$k2]) {\n $add_context_lines($k1);\n $l1 = $k1;\n $diff[] = current($old1);\n $k1 = next($old1) ? key($old1) : null;\n } else {\n $add_context_lines($mapping[$k2]);\n $l2 = $k2;\n $diff[] = current($old2);\n $k2 = next($old2) ? key($old2) : null;\n }\n }\n\n while ($idx1 < $cnt1) {\n $add_context_lines($idx1 + 1);\n $diff[] = sprintf(\"{$line_number_spec}- \", $idx1 + 1) . $w[$idx1++];\n }\n\n while ($idx2 < $cnt2) {\n if (isset($mapping[$idx2])) {\n $add_context_lines($mapping[$idx2] + 1);\n }\n $diff[] = sprintf(\"{$line_number_spec}+ \", $idx2 + 1) . $ar2[$idx2++];\n }\n $add_context_lines(min($old_k1 + $context_line_count + 1, $cnt1 + 1));\n if ($context_line_count && $old_k1 < $cnt1 + 1) {\n \/\/ Add a '--' to mark sections where the common areas were truncated\n $diff[] = '--';\n }\n\n return $diff;\n}\n\nfunction generate_diff_external(string $diff_cmd, string $exp_file, string $output_file): string\n{\n $retval = shell_exec(\"{$diff_cmd} {$exp_file} {$output_file}\");\n\n return is_string($retval) ? $retval : 'Could not run external diff tool set through PHP_TEST_DIFF_CMD environment variable';\n}\n\nfunction generate_diff(string $wanted, ?string $wanted_re, string $output): string\n{\n $w = explode(\"\\n\", $wanted);\n $o = explode(\"\\n\", $output);\n $r = is_null($wanted_re) ? 
$w : explode(\"\\n\", $wanted_re);\n $diff = generate_array_diff($r, $o, !is_null($wanted_re), $w);\n\n return implode(PHP_EOL, $diff);\n}\n\nfunction error(string $message): void\n{\n echo \"ERROR: {$message}\\n\";\n exit(1);\n}\n\nfunction settings2array(array $settings, &$ini_settings): void\n{\n foreach ($settings as $setting) {\n if (strpos($setting, '=') !== false) {\n $setting = explode(\"=\", $setting, 2);\n $name = trim($setting[0]);\n $value = trim($setting[1]);\n\n if ($name == 'extension' || $name == 'zend_extension') {\n if (!isset($ini_settings[$name])) {\n $ini_settings[$name] = [];\n }\n\n $ini_settings[$name][] = $value;\n } else {\n $ini_settings[$name] = $value;\n }\n }\n }\n}\n\nfunction settings2params(array $ini_settings): string\n{\n $settings = '';\n\n foreach ($ini_settings as $name => $value) {\n if (is_array($value)) {\n foreach ($value as $val) {\n $val = addslashes($val);\n $settings .= \" -d \\\"$name=$val\\\"\";\n }\n } else {\n if (IS_WINDOWS && !empty($value) && $value[0] == '\"') {\n $len = strlen($value);\n\n if ($value[$len - 1] == '\"') {\n $value[0] = \"'\";\n $value[$len - 1] = \"'\";\n }\n } else {\n $value = addslashes($value);\n }\n\n $settings .= \" -d \\\"$name=$value\\\"\";\n }\n }\n\n return $settings;\n}\n\nfunction compute_summary(): void\n{\n global $n_total, $test_results, $ignored_by_ext, $sum_results, $percent_results;\n\n $n_total = count($test_results);\n $n_total += $ignored_by_ext;\n $sum_results = [\n 'PASSED' => 0,\n 'WARNED' => 0,\n 'SKIPPED' => 0,\n 'FAILED' => 0,\n 'BORKED' => 0,\n 'LEAKED' => 0,\n 'XFAILED' => 0,\n 'XLEAKED' => 0\n ];\n\n foreach ($test_results as $v) {\n $sum_results[$v]++;\n }\n\n $sum_results['SKIPPED'] += $ignored_by_ext;\n $percent_results = [];\n\n foreach ($sum_results as $v => $n) {\n $percent_results[$v] = (100.0 * $n) \/ $n_total;\n }\n}\n\nfunction get_summary(bool $show_ext_summary): string\n{\n global $exts_skipped, $exts_tested, $n_total, $sum_results, $percent_results, $end_time, $start_time, $failed_test_summary, $PHP_FAILED_TESTS, $valgrind;\n\n $x_total = $n_total - $sum_results['SKIPPED'] - $sum_results['BORKED'];\n\n if ($x_total) {\n $x_warned = (100.0 * $sum_results['WARNED']) \/ $x_total;\n $x_failed = (100.0 * $sum_results['FAILED']) \/ $x_total;\n $x_xfailed = (100.0 * $sum_results['XFAILED']) \/ $x_total;\n $x_xleaked = (100.0 * $sum_results['XLEAKED']) \/ $x_total;\n $x_leaked = (100.0 * $sum_results['LEAKED']) \/ $x_total;\n $x_passed = (100.0 * $sum_results['PASSED']) \/ $x_total;\n } else {\n $x_warned = $x_failed = $x_passed = $x_leaked = $x_xfailed = $x_xleaked = 0;\n }\n\n $summary = '';\n\n if ($show_ext_summary) {\n $summary .= '\n=====================================================================\nTEST RESULT SUMMARY\n---------------------------------------------------------------------\nExts skipped : ' . sprintf('%4d', $exts_skipped) . '\nExts tested : ' . sprintf('%4d', $exts_tested) . '\n---------------------------------------------------------------------\n';\n }\n\n $summary .= '\nNumber of tests : ' . sprintf('%4d', $n_total) . ' ' . sprintf('%8d', $x_total);\n\n if ($sum_results['BORKED']) {\n $summary .= '\nTests borked : ' . sprintf('%4d (%5.1f%%)', $sum_results['BORKED'], $percent_results['BORKED']) . ' --------';\n }\n\n $summary .= '\nTests skipped : ' . sprintf('%4d (%5.1f%%)', $sum_results['SKIPPED'], $percent_results['SKIPPED']) . ' --------\nTests warned : ' . sprintf('%4d (%5.1f%%)', $sum_results['WARNED'], $percent_results['WARNED']) . ' ' . 
sprintf('(%5.1f%%)', $x_warned) . '\nTests failed : ' . sprintf('%4d (%5.1f%%)', $sum_results['FAILED'], $percent_results['FAILED']) . ' ' . sprintf('(%5.1f%%)', $x_failed);\n\n if ($sum_results['XFAILED']) {\n $summary .= '\nExpected fail : ' . sprintf('%4d (%5.1f%%)', $sum_results['XFAILED'], $percent_results['XFAILED']) . ' ' . sprintf('(%5.1f%%)', $x_xfailed);\n }\n\n if ($valgrind) {\n $summary .= '\nTests leaked : ' . sprintf('%4d (%5.1f%%)', $sum_results['LEAKED'], $percent_results['LEAKED']) . ' ' . sprintf('(%5.1f%%)', $x_leaked);\n if ($sum_results['XLEAKED']) {\n $summary .= '\nExpected leak : ' . sprintf('%4d (%5.1f%%)', $sum_results['XLEAKED'], $percent_results['XLEAKED']) . ' ' . sprintf('(%5.1f%%)', $x_xleaked);\n }\n }\n\n $summary .= '\nTests passed : ' . sprintf('%4d (%5.1f%%)', $sum_results['PASSED'], $percent_results['PASSED']) . ' ' . sprintf('(%5.1f%%)', $x_passed) . '\n---------------------------------------------------------------------\nTime taken : ' . sprintf('%4d seconds', $end_time - $start_time) . '\n=====================================================================\n';\n $failed_test_summary = '';\n\n if (count($PHP_FAILED_TESTS['SLOW'])) {\n usort($PHP_FAILED_TESTS['SLOW'], function (array $a, array $b): int {\n return $a['info'] < $b['info'] ? 1 : -1;\n });\n\n $failed_test_summary .= '\n=====================================================================\nSLOW TEST SUMMARY\n---------------------------------------------------------------------\n';\n foreach ($PHP_FAILED_TESTS['SLOW'] as $failed_test_data) {\n $failed_test_summary .= sprintf('(%.3f s) ', $failed_test_data['info']) . $failed_test_data['test_name'] . \"\\n\";\n }\n $failed_test_summary .= \"=====================================================================\\n\";\n }\n\n if (count($PHP_FAILED_TESTS['XFAILED'])) {\n $failed_test_summary .= '\n=====================================================================\nEXPECTED FAILED TEST SUMMARY\n---------------------------------------------------------------------\n';\n foreach ($PHP_FAILED_TESTS['XFAILED'] as $failed_test_data) {\n $failed_test_summary .= $failed_test_data['test_name'] . $failed_test_data['info'] . \"\\n\";\n }\n $failed_test_summary .= \"=====================================================================\\n\";\n }\n\n if (count($PHP_FAILED_TESTS['BORKED'])) {\n $failed_test_summary .= '\n=====================================================================\nBORKED TEST SUMMARY\n---------------------------------------------------------------------\n';\n foreach ($PHP_FAILED_TESTS['BORKED'] as $failed_test_data) {\n $failed_test_summary .= $failed_test_data['info'] . \"\\n\";\n }\n\n $failed_test_summary .= \"=====================================================================\\n\";\n }\n\n if (count($PHP_FAILED_TESTS['FAILED'])) {\n $failed_test_summary .= '\n=====================================================================\nFAILED TEST SUMMARY\n---------------------------------------------------------------------\n';\n foreach ($PHP_FAILED_TESTS['FAILED'] as $failed_test_data) {\n $failed_test_summary .= $failed_test_data['test_name'] . $failed_test_data['info'] . 
\"\\n\";\n }\n $failed_test_summary .= \"=====================================================================\\n\";\n }\n if (count($PHP_FAILED_TESTS['WARNED'])) {\n $failed_test_summary .= '\n=====================================================================\nWARNED TEST SUMMARY\n---------------------------------------------------------------------\n';\n foreach ($PHP_FAILED_TESTS['WARNED'] as $failed_test_data) {\n $failed_test_summary .= $failed_test_data['test_name'] . $failed_test_data['info'] . \"\\n\";\n }\n\n $failed_test_summary .= \"=====================================================================\\n\";\n }\n\n if (count($PHP_FAILED_TESTS['LEAKED'])) {\n $failed_test_summary .= '\n=====================================================================\nLEAKED TEST SUMMARY\n---------------------------------------------------------------------\n';\n foreach ($PHP_FAILED_TESTS['LEAKED'] as $failed_test_data) {\n $failed_test_summary .= $failed_test_data['test_name'] . $failed_test_data['info'] . \"\\n\";\n }\n\n $failed_test_summary .= \"=====================================================================\\n\";\n }\n\n if (count($PHP_FAILED_TESTS['XLEAKED'])) {\n $failed_test_summary .= '\n=====================================================================\nEXPECTED LEAK TEST SUMMARY\n---------------------------------------------------------------------\n';\n foreach ($PHP_FAILED_TESTS['XLEAKED'] as $failed_test_data) {\n $failed_test_summary .= $failed_test_data['test_name'] . $failed_test_data['info'] . \"\\n\";\n }\n\n $failed_test_summary .= \"=====================================================================\\n\";\n }\n\n if ($failed_test_summary && !getenv('NO_PHPTEST_SUMMARY')) {\n $summary .= $failed_test_summary;\n }\n\n return $summary;\n}\n\nfunction show_start($start_time): void\n{\n echo \"TIME START \" . date('Y-m-d H:i:s', $start_time) . \"\\n=====================================================================\\n\";\n}\n\nfunction show_end($end_time): void\n{\n echo \"=====================================================================\\nTIME END \" . date('Y-m-d H:i:s', $end_time) . 
\"\\n\";\n}\n\nfunction show_summary(): void\n{\n echo get_summary(true);\n}\n\nfunction show_redirect_start(string $tests, string $tested, string $tested_file): void\n{\n global $SHOW_ONLY_GROUPS, $show_progress;\n\n if (!$SHOW_ONLY_GROUPS || in_array('REDIRECT', $SHOW_ONLY_GROUPS)) {\n echo \"REDIRECT $tests ($tested [$tested_file]) begin\\n\";\n } elseif ($show_progress) {\n clear_show_test();\n }\n}\n\nfunction show_redirect_ends(string $tests, string $tested, string $tested_file): void\n{\n global $SHOW_ONLY_GROUPS, $show_progress;\n\n if (!$SHOW_ONLY_GROUPS || in_array('REDIRECT', $SHOW_ONLY_GROUPS)) {\n echo \"REDIRECT $tests ($tested [$tested_file]) done\\n\";\n } elseif ($show_progress) {\n clear_show_test();\n }\n}\n\nfunction show_test(int $test_idx, string $shortname): void\n{\n global $test_cnt;\n global $line_length;\n\n $str = \"TEST $test_idx\/$test_cnt [$shortname]\\r\";\n $line_length = strlen($str);\n echo $str;\n flush();\n}\n\nfunction clear_show_test(): void\n{\n global $line_length;\n \/\/ Parallel testing\n global $workerID;\n\n if (!$workerID && isset($line_length)) {\n \/\/ Write over the last line to avoid random trailing chars on next echo\n echo str_repeat(\" \", $line_length), \"\\r\";\n }\n}\n\nfunction parse_conflicts(string $text): array\n{\n \/\/ Strip comments\n $text = preg_replace('\/#.*\/', '', $text);\n return array_map('trim', explode(\"\\n\", trim($text)));\n}\n\nfunction show_result(\n string $result,\n string $tested,\n string $tested_file,\n string $extra = '',\n ?array $temp_filenames = null\n): void {\n global $SHOW_ONLY_GROUPS, $colorize, $show_progress;\n\n if (!$SHOW_ONLY_GROUPS || in_array($result, $SHOW_ONLY_GROUPS)) {\n if ($colorize) {\n \/* Use ANSI escape codes for coloring test result *\/\n switch ( $result ) {\n case 'PASS': \/\/ Light Green\n $color = \"\\e[1;32m{$result}\\e[0m\"; break;\n case 'FAIL':\n case 'BORK':\n case 'LEAK':\n case 'LEAK&FAIL':\n \/\/ Light Red\n $color = \"\\e[1;31m{$result}\\e[0m\"; break;\n default: \/\/ Yellow\n $color = \"\\e[1;33m{$result}\\e[0m\"; break;\n }\n\n echo \"$color $tested [$tested_file] $extra\\n\";\n } else {\n echo \"$result $tested [$tested_file] $extra\\n\";\n }\n } elseif ($show_progress) {\n clear_show_test();\n }\n}\n\nclass BorkageException extends Exception\n{\n}\n\nclass JUnit\n{\n private bool $enabled = true;\n private $fp = null;\n private array $suites = [];\n private array $rootSuite = self::EMPTY_SUITE + ['name' => 'php'];\n\n private const EMPTY_SUITE = [\n 'test_total' => 0,\n 'test_pass' => 0,\n 'test_fail' => 0,\n 'test_error' => 0,\n 'test_skip' => 0,\n 'test_warn' => 0,\n 'files' => [],\n 'execution_time' => 0,\n ];\n\n \/**\n * @throws Exception\n *\/\n public function __construct(array $env, int $workerID)\n {\n \/\/ Check whether a junit log is wanted.\n $fileName = $env['TEST_PHP_JUNIT'] ?? null;\n if (empty($fileName)) {\n $this->enabled = false;\n return;\n }\n if (!$workerID && !$this->fp = fopen($fileName, 'w')) {\n throw new Exception(\"Failed to open $fileName for writing.\");\n }\n }\n\n public function isEnabled(): bool\n {\n return $this->enabled;\n }\n\n public function clear(): void\n {\n $this->rootSuite = self::EMPTY_SUITE + ['name' => 'php'];\n $this->suites = [];\n }\n\n public function saveXML(): void\n {\n if (!$this->enabled) {\n return;\n }\n\n $xml = '<' . '?' . 'xml version=\"1.0\" encoding=\"UTF-8\"' . '?' . '>' . PHP_EOL;\n $xml .= sprintf(\n '<testsuites name=\"%s\" tests=\"%s\" failures=\"%d\" errors=\"%d\" skip=\"%d\" time=\"%s\">' . 
PHP_EOL,\n $this->rootSuite['name'],\n $this->rootSuite['test_total'],\n $this->rootSuite['test_fail'],\n $this->rootSuite['test_error'],\n $this->rootSuite['test_skip'],\n $this->rootSuite['execution_time']\n );\n $xml .= $this->getSuitesXML();\n $xml .= '<\/testsuites>';\n fwrite($this->fp, $xml);\n }\n\n private function getSuitesXML(string $suite_name = '')\n {\n \/\/ FIXME: $suite_name gets overwritten\n $result = '';\n\n foreach ($this->suites as $suite_name => $suite) {\n $result .= sprintf(\n '<testsuite name=\"%s\" tests=\"%s\" failures=\"%d\" errors=\"%d\" skip=\"%d\" time=\"%s\">' . PHP_EOL,\n $suite['name'],\n $suite['test_total'],\n $suite['test_fail'],\n $suite['test_error'],\n $suite['test_skip'],\n $suite['execution_time']\n );\n\n if (!empty($suite_name)) {\n foreach ($suite['files'] as $file) {\n $result .= $this->rootSuite['files'][$file]['xml'];\n }\n }\n\n $result .= '<\/testsuite>' . PHP_EOL;\n }\n\n return $result;\n }\n\n public function markTestAs(\n $type,\n string $file_name,\n string $test_name,\n ?int $time = null,\n string $message = '',\n string $details = ''\n ): void {\n if (!$this->enabled) {\n return;\n }\n\n $suite = $this->getSuiteName($file_name);\n\n $this->record($suite, 'test_total');\n\n $time = $time ?? $this->getTimer($file_name);\n $this->record($suite, 'execution_time', $time);\n\n $escaped_details = htmlspecialchars($details, ENT_QUOTES, 'UTF-8');\n $escaped_details = preg_replace_callback('\/[\\0-\\x08\\x0B\\x0C\\x0E-\\x1F]\/', function ($c) {\n return sprintf('[[0x%02x]]', ord($c[0]));\n }, $escaped_details);\n $escaped_message = htmlspecialchars($message, ENT_QUOTES, 'UTF-8');\n\n $escaped_test_name = htmlspecialchars($file_name . ' (' . $test_name . ')', ENT_QUOTES);\n $this->rootSuite['files'][$file_name]['xml'] = \"<testcase name='$escaped_test_name' time='$time'>\\n\";\n\n if (is_array($type)) {\n $output_type = $type[0] . 'ED';\n $temp = array_intersect(['XFAIL', 'XLEAK', 'FAIL', 'WARN'], $type);\n $type = reset($temp);\n } else {\n $output_type = $type . 
'ED';\n }\n\n if ('PASS' == $type || 'XFAIL' == $type || 'XLEAK' == $type) {\n $this->record($suite, 'test_pass');\n } elseif ('BORK' == $type) {\n $this->record($suite, 'test_error');\n $this->rootSuite['files'][$file_name]['xml'] .= \"<error type='$output_type' message='$escaped_message'\/>\\n\";\n } elseif ('SKIP' == $type) {\n $this->record($suite, 'test_skip');\n $this->rootSuite['files'][$file_name]['xml'] .= \"<skipped>$escaped_message<\/skipped>\\n\";\n } elseif ('WARN' == $type) {\n $this->record($suite, 'test_warn');\n $this->rootSuite['files'][$file_name]['xml'] .= \"<warning>$escaped_message<\/warning>\\n\";\n } elseif ('FAIL' == $type) {\n $this->record($suite, 'test_fail');\n $this->rootSuite['files'][$file_name]['xml'] .= \"<failure type='$output_type' message='$escaped_message'>$escaped_details<\/failure>\\n\";\n } else {\n $this->record($suite, 'test_error');\n $this->rootSuite['files'][$file_name]['xml'] .= \"<error type='$output_type' message='$escaped_message'>$escaped_details<\/error>\\n\";\n }\n\n $this->rootSuite['files'][$file_name]['xml'] .= \"<\/testcase>\\n\";\n }\n\n private function record(string $suite, string $param, $value = 1): void\n {\n $this->rootSuite[$param] += $value;\n $this->suites[$suite][$param] += $value;\n }\n\n private function getTimer(string $file_name)\n {\n if (!$this->enabled) {\n return 0;\n }\n\n if (isset($this->rootSuite['files'][$file_name]['total'])) {\n return number_format($this->rootSuite['files'][$file_name]['total'], 4);\n }\n\n return 0;\n }\n\n public function startTimer(string $file_name): void\n {\n if (!$this->enabled) {\n return;\n }\n\n if (!isset($this->rootSuite['files'][$file_name]['start'])) {\n $this->rootSuite['files'][$file_name]['start'] = microtime(true);\n\n $suite = $this->getSuiteName($file_name);\n $this->initSuite($suite);\n $this->suites[$suite]['files'][$file_name] = $file_name;\n }\n }\n\n public function getSuiteName(string $file_name): string\n {\n return $this->pathToClassName(dirname($file_name));\n }\n\n private function pathToClassName(string $file_name): string\n {\n if (!$this->enabled) {\n return '';\n }\n\n $ret = $this->rootSuite['name'];\n $_tmp = [];\n\n \/\/ lookup whether we're in the PHP source checkout\n $max = 5;\n if (is_file($file_name)) {\n $dir = dirname(realpath($file_name));\n } else {\n $dir = realpath($file_name);\n }\n do {\n array_unshift($_tmp, basename($dir));\n $chk = $dir . DIRECTORY_SEPARATOR . \"main\" . DIRECTORY_SEPARATOR . \"php_version.h\";\n $dir = dirname($dir);\n } while (!file_exists($chk) && --$max > 0);\n if (file_exists($chk)) {\n if ($max) {\n array_shift($_tmp);\n }\n foreach ($_tmp as $p) {\n $ret .= \".\" . preg_replace(\",[^a-z0-9]+,i\", \".\", $p);\n }\n return $ret;\n }\n\n return $this->rootSuite['name'] . '.' . 
str_replace([DIRECTORY_SEPARATOR, '-'], '.', $file_name);\n }\n\n public function initSuite(string $suite_name): void\n {\n if (!$this->enabled) {\n return;\n }\n\n if (!empty($this->suites[$suite_name])) {\n return;\n }\n\n $this->suites[$suite_name] = self::EMPTY_SUITE + ['name' => $suite_name];\n }\n\n \/**\n * @throws Exception\n *\/\n public function stopTimer(string $file_name): void\n {\n if (!$this->enabled) {\n return;\n }\n\n if (!isset($this->rootSuite['files'][$file_name]['start'])) {\n throw new Exception(\"Timer for $file_name was not started!\");\n }\n\n if (!isset($this->rootSuite['files'][$file_name]['total'])) {\n $this->rootSuite['files'][$file_name]['total'] = 0;\n }\n\n $start = $this->rootSuite['files'][$file_name]['start'];\n $this->rootSuite['files'][$file_name]['total'] += microtime(true) - $start;\n unset($this->rootSuite['files'][$file_name]['start']);\n }\n\n public function mergeResults(?JUnit $other): void\n {\n if (!$this->enabled || !$other) {\n return;\n }\n\n $this->mergeSuites($this->rootSuite, $other->rootSuite);\n foreach ($other->suites as $name => $suite) {\n if (!isset($this->suites[$name])) {\n $this->suites[$name] = $suite;\n continue;\n }\n\n $this->mergeSuites($this->suites[$name], $suite);\n }\n }\n\n private function mergeSuites(array &$dest, array $source): void\n {\n $dest['test_total'] += $source['test_total'];\n $dest['test_pass'] += $source['test_pass'];\n $dest['test_fail'] += $source['test_fail'];\n $dest['test_error'] += $source['test_error'];\n $dest['test_skip'] += $source['test_skip'];\n $dest['test_warn'] += $source['test_warn'];\n $dest['execution_time'] += $source['execution_time'];\n $dest['files'] += $source['files'];\n }\n}\n\nclass SkipCache\n{\n private bool $enable;\n private bool $keepFile;\n\n private array $skips = [];\n private array $extensions = [];\n\n private int $hits = 0;\n private int $misses = 0;\n private int $extHits = 0;\n private int $extMisses = 0;\n\n public function __construct(bool $enable, bool $keepFile)\n {\n $this->enable = $enable;\n $this->keepFile = $keepFile;\n }\n\n public function checkSkip(string $php, string $code, string $checkFile, string $tempFile, array $env): string\n {\n \/\/ Extension tests frequently use something like <?php require 'skipif.inc';\n \/\/ for skip checks. 
This forces us to cache per directory to avoid pollution.\n $dir = dirname($checkFile);\n $key = \"$php => $dir\";\n\n if (isset($this->skips[$key][$code])) {\n $this->hits++;\n if ($this->keepFile) {\n save_text($checkFile, $code, $tempFile);\n }\n return $this->skips[$key][$code];\n }\n\n save_text($checkFile, $code, $tempFile);\n $result = trim(system_with_timeout(\"$php \\\"$checkFile\\\"\", $env));\n if (strpos($result, 'nocache') === 0) {\n $result = '';\n } else if ($this->enable) {\n $this->skips[$key][$code] = $result;\n }\n $this->misses++;\n\n if (!$this->keepFile) {\n @unlink($checkFile);\n }\n\n return $result;\n }\n\n public function getExtensions(string $php): array\n {\n if (isset($this->extensions[$php])) {\n $this->extHits++;\n return $this->extensions[$php];\n }\n\n $extDir = shell_exec(\"$php -d display_errors=0 -r \\\"echo ini_get('extension_dir');\\\"\");\n $extensions = explode(\",\", shell_exec(\"$php -d display_errors=0 -r \\\"echo implode(',', get_loaded_extensions());\\\"\"));\n $extensions = array_map('strtolower', $extensions);\n if (in_array('zend opcache', $extensions)) {\n $extensions[] = 'opcache';\n }\n\n $result = [$extDir, $extensions];\n $this->extensions[$php] = $result;\n $this->extMisses++;\n\n return $result;\n }\n\n\/\/ public function __destruct()\n\/\/ {\n\/\/ echo \"Skips: {$this->hits} hits, {$this->misses} misses.\\n\";\n\/\/ echo \"Extensions: {$this->extHits} hits, {$this->extMisses} misses.\\n\";\n\/\/ echo \"Cache distribution:\\n\";\n\/\/\n\/\/ foreach ($this->skips as $php => $cache) {\n\/\/ echo \"$php: \" . count($cache) . \"\\n\";\n\/\/ }\n\/\/ }\n}\n\nclass RuntestsValgrind\n{\n protected $version = '';\n protected $header = '';\n protected $version_3_8_0 = false;\n protected $tool = null;\n\n public function getVersion(): string\n {\n return $this->version;\n }\n\n public function getHeader(): string\n {\n return $this->header;\n }\n\n public function __construct(array $environment, string $tool = 'memcheck')\n {\n $this->tool = $tool;\n $header = system_with_timeout(\"valgrind --tool={$this->tool} --version\", $environment);\n if (!$header) {\n error(\"Valgrind returned no version info for {$this->tool}, cannot proceed.\\n\".\n \"Please check if Valgrind is installed and the tool is named correctly.\");\n }\n $count = 0;\n $version = preg_replace(\"\/valgrind-(\\d+)\\.(\\d+)\\.(\\d+)([.\\w_-]+)?(\\s+)\/\", '$1.$2.$3', $header, 1, $count);\n if ($count != 1) {\n error(\"Valgrind returned invalid version info (\\\"{$header}\\\") for {$this->tool}, cannot proceed.\");\n }\n $this->version = $version;\n $this->header = sprintf(\n \"%s (%s)\", trim($header), $this->tool);\n $this->version_3_8_0 = version_compare($version, '3.8.0', '>=');\n }\n\n public function wrapCommand(string $cmd, string $memcheck_filename, bool $check_all): string\n {\n $vcmd = \"valgrind -q --tool={$this->tool} --trace-children=yes\";\n if ($check_all) {\n $vcmd .= ' --smc-check=all';\n }\n\n \/* --vex-iropt-register-updates=allregs-at-mem-access is necessary for phpdbg watchpoint tests *\/\n if ($this->version_3_8_0) {\n return \"$vcmd --vex-iropt-register-updates=allregs-at-mem-access --log-file=$memcheck_filename $cmd\";\n }\n return \"$vcmd --vex-iropt-precise-memory-exns=yes --log-file=$memcheck_filename $cmd\";\n }\n}\n\nclass TestFile\n{\n private string $fileName;\n\n private array $sections = ['TEST' => ''];\n\n private const ALLOWED_SECTIONS = [\n 'EXPECT', 'EXPECTF', 'EXPECTREGEX', 'EXPECTREGEX_EXTERNAL', 'EXPECT_EXTERNAL', 'EXPECTF_EXTERNAL', 
'EXPECTHEADERS',\n 'POST', 'POST_RAW', 'GZIP_POST', 'DEFLATE_POST', 'PUT', 'GET', 'COOKIE', 'ARGS',\n 'FILE', 'FILEEOF', 'FILE_EXTERNAL', 'REDIRECTTEST',\n 'CAPTURE_STDIO', 'STDIN', 'CGI', 'PHPDBG',\n 'INI', 'ENV', 'EXTENSIONS',\n 'SKIPIF', 'XFAIL', 'XLEAK', 'CLEAN',\n 'CREDITS', 'DESCRIPTION', 'CONFLICTS', 'WHITESPACE_SENSITIVE',\n ];\n\n \/**\n * @throws BorkageException\n *\/\n public function __construct(string $fileName, bool $inRedirect)\n {\n $this->fileName = $fileName;\n\n $this->readFile();\n $this->validateAndProcess($inRedirect);\n }\n\n public function hasSection(string $name): bool\n {\n return isset($this->sections[$name]);\n }\n\n public function hasAllSections(string ...$names): bool\n {\n foreach ($names as $section) {\n if (!isset($this->sections[$section])) {\n return false;\n }\n }\n\n return true;\n }\n\n public function hasAnySections(string ...$names): bool\n {\n foreach ($names as $section) {\n if (isset($this->sections[$section])) {\n return true;\n }\n }\n\n return false;\n }\n\n public function sectionNotEmpty(string $name): bool\n {\n return !empty($this->sections[$name]);\n }\n\n \/**\n * @throws Exception\n *\/\n public function getSection(string $name): string\n {\n if (!isset($this->sections[$name])) {\n throw new Exception(\"Section $name not found\");\n }\n return $this->sections[$name];\n }\n\n public function getName(): string\n {\n return trim($this->getSection('TEST'));\n }\n\n public function isCGI(): bool\n {\n return $this->hasSection('CGI')\n || $this->sectionNotEmpty('GET')\n || $this->sectionNotEmpty('POST')\n || $this->sectionNotEmpty('GZIP_POST')\n || $this->sectionNotEmpty('DEFLATE_POST')\n || $this->sectionNotEmpty('POST_RAW')\n || $this->sectionNotEmpty('PUT')\n || $this->sectionNotEmpty('COOKIE')\n || $this->sectionNotEmpty('EXPECTHEADERS');\n }\n\n \/**\n * TODO Refactor to make it not needed\n *\/\n public function setSection(string $name, string $value): void\n {\n $this->sections[$name] = $value;\n }\n\n \/**\n * Load the sections of the test file\n * @throws BorkageException\n *\/\n private function readFile(): void\n {\n $fp = fopen($this->fileName, \"rb\") or error(\"Cannot open test file: {$this->fileName}\");\n\n if (!feof($fp)) {\n $line = fgets($fp);\n\n if ($line === false) {\n throw new BorkageException(\"cannot read test\");\n }\n } else {\n throw new BorkageException(\"empty test [{$this->fileName}]\");\n }\n if (strncmp('--TEST--', $line, 8)) {\n throw new BorkageException(\"tests must start with --TEST-- [{$this->fileName}]\");\n }\n\n $section = 'TEST';\n $secfile = false;\n $secdone = false;\n\n while (!feof($fp)) {\n $line = fgets($fp);\n\n if ($line === false) {\n break;\n }\n\n \/\/ Match the beginning of a section.\n if (preg_match('\/^--([_A-Z]+)--\/', $line, $r)) {\n $section = (string) $r[1];\n\n if (isset($this->sections[$section]) && $this->sections[$section]) {\n throw new BorkageException(\"duplicated $section section\");\n }\n\n \/\/ check for unknown sections\n if (!in_array($section, self::ALLOWED_SECTIONS)) {\n throw new BorkageException('Unknown section \"' . $section . 
'\"');\n }\n\n $this->sections[$section] = '';\n $secfile = $section == 'FILE' || $section == 'FILEEOF' || $section == 'FILE_EXTERNAL';\n $secdone = false;\n continue;\n }\n\n \/\/ Add to the section text.\n if (!$secdone) {\n $this->sections[$section] .= $line;\n }\n\n \/\/ End of actual test?\n if ($secfile && preg_match('\/^===DONE===\\s*$\/', $line)) {\n $secdone = true;\n }\n }\n\n fclose($fp);\n }\n\n \/**\n * @throws BorkageException\n *\/\n private function validateAndProcess(bool $inRedirect): void\n {\n \/\/ the redirect section allows a set of tests to be reused outside of\n \/\/ a given test dir\n if ($this->hasSection('REDIRECTTEST')) {\n if ($inRedirect) {\n throw new BorkageException(\"Can't redirect a test from within a redirected test\");\n }\n return;\n }\n if (!$this->hasSection('PHPDBG') && $this->hasSection('FILE') + $this->hasSection('FILEEOF') + $this->hasSection('FILE_EXTERNAL') != 1) {\n throw new BorkageException(\"missing section --FILE--\");\n }\n\n if ($this->hasSection('FILEEOF')) {\n $this->sections['FILE'] = preg_replace(\"\/[\\r\\n]+$\/\", '', $this->sections['FILEEOF']);\n unset($this->sections['FILEEOF']);\n }\n\n foreach (['FILE', 'EXPECT', 'EXPECTF', 'EXPECTREGEX'] as $prefix) {\n \/\/ For grepping: FILE_EXTERNAL, EXPECT_EXTERNAL, EXPECTF_EXTERNAL, EXPECTREGEX_EXTERNAL\n $key = $prefix . '_EXTERNAL';\n\n if ($this->hasSection($key)) {\n \/\/ don't allow tests to retrieve files from anywhere but this subdirectory\n $dir = dirname($this->fileName);\n $fileName = $dir . '\/' . trim(str_replace('..', '', $this->getSection($key)));\n\n if (file_exists($fileName)) {\n $this->sections[$prefix] = file_get_contents($fileName);\n } else {\n throw new BorkageException(\"could not load --\" . $key . \"-- \" . $dir . '\/' . trim($fileName));\n }\n }\n }\n\n if (($this->hasSection('EXPECT') + $this->hasSection('EXPECTF') + $this->hasSection('EXPECTREGEX')) != 1) {\n throw new BorkageException(\"missing section --EXPECT--, --EXPECTF-- or --EXPECTREGEX--\");\n }\n\n if ($this->hasSection('PHPDBG') && !$this->hasSection('STDIN')) {\n $this->sections['STDIN'] = $this->sections['PHPDBG'] . \"\\n\";\n }\n }\n}\n\nfunction init_output_buffers(): void\n{\n \/\/ Delete as much output buffers as possible.\n while (@ob_end_clean()) {\n }\n\n if (ob_get_level()) {\n echo \"Not all buffers were deleted.\\n\";\n }\n}\n\nfunction check_proc_open_function_exists(): void\n{\n if (!function_exists('proc_open')) {\n echo <<<NO_PROC_OPEN_ERROR\n\n+-----------------------------------------------------------+\n| ! ERROR ! |\n| The test-suite requires that proc_open() is available. |\n| Please check if you disabled it in php.ini. |\n+-----------------------------------------------------------+\n\nNO_PROC_OPEN_ERROR;\n exit(1);\n }\n}\n\nfunction bless_failed_tests(array $failedTests): void\n{\n if (empty($failedTests)) {\n return;\n }\n $args = [\n PHP_BINARY,\n __DIR__ . '\/scripts\/dev\/bless_tests.php',\n ];\n foreach ($failedTests as $test) {\n $args[] = $test['name'];\n }\n proc_open($args, [], $pipes);\n}\n\nmain();\n"