Compare commits
32 Commits
| SHA1 |
|---|
| fcd19c06f1 |
| 4f7868c180 |
| d2920a11bc |
| 1d14600d15 |
| 9e1c29ef69 |
| cd5c38288a |
| 54b9f60f2f |
| d7faef9306 |
| 2aecb64f72 |
| 7c208aaa4d |
| dbc459abba |
| 7c00f06910 |
| 0779728ca4 |
| 33df9275b7 |
| 0260e47dec |
| 1adcaa0e1e |
| e88ee94720 |
| ef33502369 |
| 25b3a2b20a |
| e8f7af5d5b |
| a9b1a7a750 |
| c1c84e3a51 |
| 9d0d70b36f |
| 5b6b81e6a4 |
| 6882d840b7 |
| c7bf55e51e |
| bc1e97d16a |
| e44699a53b |
| 5fdbb259ec |
| 8ffdd5f9b7 |
| 4b85795af8 |
| fd7242cf1d |
19  ChangeLog
@@ -1,9 +1,26 @@
 PHORONIX TEST SUITE CHANGE-LOG
 
+Phoronix Test Suite 9.2.0 Milestone 2
+13 November 2019
+
+pts-core: Drop auto-compare sub-command since currently buggy / too server resource intensive
+pts-core: Add FORCE_MIN_DURATION_PER_TEST
+pts-core: Don't sort geometric mean results when the only difference between result identifiers is numeric in nature
+pts-core: Allow preserving the precision of the original workload under test if not overriden by test profile
+pts-core: Record and report CPU microcode version in benchmark result file
+system_monitor: Allow multiple PERFORMANCE_PER_SENSOR= to be delimited by comma
+
+Phoronix Test Suite 9.2.0 Milestone 1
+17 October 2019
+
+pts-core: New possible external dependencies for gflags, clang, uuid, tclsh
+pts-core: External dependencies updates for Windows
+pts-core: Crash fix for macOS in regression from PTS 9.0.1 launcher change
+
 Phoronix Test Suite 9.0.1-Asker
 1 October 2019
 
-pts-core: Fatal error fix for old PHP5 versions with pts_openbenchmarking_upload crash
+pts-core: Fatal error fix for old PHP5 versions (RHEL 7) with pts_openbenchmarking_upload crash
 pts-core: Set AllowResultUploadsToOpenBenchmarking = FALSE in enterprise-setup
 ob_auto_compare: Show "since [date]" as part of the inline box plot
 ob_auto_compare: Various tweaks/styling improvements to the inline box-plot display
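The system_monitor entry above concerns that module's environment options; below is a minimal, hedged sketch of the new comma-delimited form. The sensor identifiers and test profile are illustrative placeholders, not names verified against the module:

```sh
# Hypothetical invocation: monitor two sensors and report performance
# per sensor; cpu.temp and sys.power are placeholder Phodevi-style names.
MONITOR=cpu.temp,sys.power PERFORMANCE_PER_SENSOR=cpu.temp,sys.power \
	phoronix-test-suite benchmark pts/compress-7zip
```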
@@ -1,4 +1,4 @@
-# Phoronix Test Suite 9.0.1
+# Phoronix Test Suite 9.2.0m2
 https://www.phoronix-test-suite.com/
 
 The **Phoronix Test Suite** is the most comprehensive testing and benchmarking
@@ -53,7 +53,7 @@ shell_exec("./install-sh /tmp/pts-deb-builder/usr");
 $pts_version = str_replace("a", "~a", str_replace("b", "~b", PTS_VERSION)); // Fix version
 
 $phoronix_test_suite_bin = file_get_contents("phoronix-test-suite");
-$phoronix_test_suite_bin = str_replace("export PTS_DIR=`pwd`", "export PTS_DIR='/usr/share/phoronix-test-suite/'", $phoronix_test_suite_bin);
+$phoronix_test_suite_bin = str_replace("#export PTS_DIR=`pwd`", "export PTS_DIR='/usr/share/phoronix-test-suite/'", $phoronix_test_suite_bin);
 file_put_contents("/tmp/pts-deb-builder/usr/bin/phoronix-test-suite", $phoronix_test_suite_bin);
 shell_exec("chmod +x /tmp/pts-deb-builder/usr/bin/phoronix-test-suite");
@@ -1,4 +1,4 @@
-.TH phoronix-test-suite 1 "www.phoronix-test-suite.com" "9.0.1"
+.TH phoronix-test-suite 1 "www.phoronix-test-suite.com" "9.2.0m2"
 .SH NAME
 phoronix-test-suite \- The Phoronix Test Suite is an extensible open-source platform for performing testing and performance evaluation.
 .SH SYNOPSIS
@@ -45,9 +45,6 @@ This option will create a download cache for use by the Phoronix Test Suite. The
 This option will permanently remove a installed test by the Phoronix Test Suite.
 .TP
 .SH TESTING
-.B auto-compare
-This option will autonomously determine the most relevant test(s) to run for any selected sub-system(s). The tests to run are determined via OpenBenchmarking.org integration with the global results pool. Related test results from OpenBenchmarking.org are also merged to provide a straight-forward and effective means of carrying out a system comparison. If wishing to find comparable results for any particular test profile(s), simply pass the test profile names as additional arguments to this command.
-.TP
 .B benchmark [Test | Suite | OpenBenchmarking ID | Test Result] ...
 This option will install the selected test(s) (if needed) and will proceed to run the test(s). This option is equivalent to running phoronix-test-suite with the install option followed by the run option. Multiple arguments can be supplied to run additional tests at the same time and save the results into one file.
 .TP
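As a concrete illustration of the benchmark sub-command retained above (the profile name is just an example; any test, suite, OpenBenchmarking ID, or test result identifier works):

```sh
# Equivalent to running `install` followed by `run`; multiple arguments
# may be given to run several tests and save the results into one file.
phoronix-test-suite benchmark pts/compress-7zip
```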
@@ -294,9 +294,6 @@ User Options
 </h3>
 <p>This option will permanently remove a installed test by the Phoronix Test Suite.</p>
 <h1>Testing</h1>
-<h3>auto-compare<em> </em>
-</h3>
-<p>This option will autonomously determine the most relevant test(s) to run for any selected sub-system(s). The tests to run are determined via OpenBenchmarking.org integration with the global results pool. Related test results from OpenBenchmarking.org are also merged to provide a straight-forward and effective means of carrying out a system comparison. If wishing to find comparable results for any particular test profile(s), simply pass the test profile names as additional arguments to this command.</p>
 <h3>benchmark<em> [Test | Suite | OpenBenchmarking ID | Test Result] ...</em>
 </h3>
 <p>This option will install the selected test(s) (if needed) and will proceed to run the test(s). This option is equivalent to running phoronix-test-suite with the install option followed by the run option. Multiple arguments can be supplied to run additional tests at the same time and save the results into one file.</p>
@@ -815,6 +812,8 @@ Configuration
 <p>This is similar to the FORCE_TIMES_TO_RUN option but will only be used if the test profile's run count is less than this defined value.</p>
 <p><strong>FORCE_MIN_TIMES_TO_RUN_CUTOFF</strong></p>
 <p>When used in conjunction with FORCE_MIN_TIMES_TO_RUN, the override value will only be applied to test profiles where its average run-time length (in minutes) is less than the value specified by FORCE_MIN_TIMES_TO_RUN_CUTOFF.</p>
+<p><strong>FORCE_MIN_DURATION_PER_TEST</strong></p>
+<p>This is similar to FORCE_MIN_TIMES_TO_RUN but allows specifying a time (in minutes) that each test should be run for. Each test will loop at least until that amount of time has elapsed. This can be useful for short-running tests if wanting to ensure each test is run long enough to rule out system noise.</p>
 <p><strong>IGNORE_RUNS</strong></p>
 <p>IGNORE_RUNS can be passed a comma-separated list of runs to skip on each benchmark. For example, IGNORE_RUNS=1 would always drop the first run from being recorded.</p>
 <p><strong>NO_FILE_HASH_CHECKS</strong></p>
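These options are passed as environment variables on the command line; a hedged sketch combining the new FORCE_MIN_DURATION_PER_TEST option with IGNORE_RUNS (the test profile is a placeholder):

```sh
# Loop each test for at least 5 minutes and drop the first (warm-up) run
# from the recorded results; substitute any test or suite of interest.
FORCE_MIN_DURATION_PER_TEST=5 IGNORE_RUNS=1 \
	phoronix-test-suite benchmark pts/compress-7zip
```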
@@ -956,18 +955,24 @@ Virtual Test Suites
 <h3>Cuda Tests<em> pts/cuda</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cuda.</p>
+<h3>Mpi Tests<em> pts/mpi</em>
+</h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.</p>
 <h3>Openmp Tests<em> pts/openmp</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmp.</p>
+<h3>Cloud Tests<em> pts/cloud</em>
+</h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cloud.</p>
+<h3>Docker Tests<em> pts/docker</em>
+</h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing docker.</p>
 <h3>Python Tests<em> pts/python</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing python.</p>
 <h3>Go Tests<em> pts/go</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing go.</p>
-<h3>Mpi Tests<em> pts/mpi</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.</p>
 <h3>Vdpau Tests<em> pts/vdpau</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing vdpau.</p>
@@ -1121,93 +1126,78 @@ Virtual Test Suites
 <h3>Smp Tests<em> git/smp</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing smp.</p>
-<h3>All PTS Tests<em> pts/all</em>
+<h3>All SYSTEM Tests<em> system/all</em>
 </h3>
 <p>This is a collection of all test profiles found within the specified OpenBenchmarking.org repository.</p>
-<h3>Installed Tests<em> pts/installed</em>
+<h3>Installed Tests<em> system/installed</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository that are already installed on the system under test.</p>
-<h3>Every PTS Test<em> pts/everything</em>
+<h3>Every SYSTEM Test<em> system/everything</em>
 </h3>
 <p>This is a collection of every test profile found within the specified OpenBenchmarking.org repository, including unsupported tests.</p>
-<h3>Linux Operating System Tests<em> pts/linux</em>
+<h3>Linux Operating System Tests<em> system/linux</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Linux Operating System.</p>
-<h3>Solaris Operating System Tests<em> pts/solaris</em>
+<h3>Solaris Operating System Tests<em> system/solaris</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Solaris Operating System.</p>
-<h3>BSD Operating System Tests<em> pts/bsd</em>
+<h3>BSD Operating System Tests<em> system/bsd</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the BSD Operating System.</p>
-<h3>MacOSX Operating System Tests<em> pts/macosx</em>
+<h3>MacOSX Operating System Tests<em> system/macosx</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the MacOSX Operating System.</p>
-<h3>Windows Operating System Tests<em> pts/windows</em>
+<h3>Windows Operating System Tests<em> system/windows</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Windows Operating System.</p>
-<h3>Hurd Operating System Tests<em> pts/hurd</em>
+<h3>Hurd Operating System Tests<em> system/hurd</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Hurd Operating System.</p>
-<h3>System Subsystem Tests<em> pts/system</em>
+<h3>System Subsystem Tests<em> system/system</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the System sub-system.</p>
-<h3>Processor Subsystem Tests<em> pts/processor</em>
+<h3>Processor Subsystem Tests<em> system/processor</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Processor sub-system.</p>
-<h3>Graphics Subsystem Tests<em> pts/graphics</em>
+<h3>Disk Subsystem Tests<em> system/disk</em>
 </h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Disk sub-system.</p>
+<h3>Graphics Subsystem Tests<em> system/graphics</em>
+</h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Graphics sub-system.</p>
-<h3>Other Subsystem Tests<em> pts/other</em>
+<h3>Memory Subsystem Tests<em> system/memory</em>
 </h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Memory sub-system.</p>
+<h3>Network Subsystem Tests<em> system/network</em>
+</h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Network sub-system.</p>
+<h3>Other Subsystem Tests<em> system/other</em>
+</h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Other sub-system.</p>
-<h3>Utility Tests<em> pts/utility</em>
+<h3>Utility Tests<em> system/utility</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Utility software test.</p>
-<h3>Simulator Tests<em> pts/simulator</em>
+<h3>Simulator Tests<em> system/simulator</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Simulator software test.</p>
-<h3>Scientific Tests<em> pts/scientific</em>
+<h3>Scientific Tests<em> system/scientific</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Scientific software test.</p>
-<h3>Benchmark Tests<em> pts/benchmark</em>
+<h3>Benchmark Tests<em> system/benchmark</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Benchmark software test.</p>
-<h3>Application Tests<em> pts/application</em>
+<h3>Application Tests<em> system/application</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Application software test.</p>
-<h3>Game Tests<em> pts/game</em>
+<h3>Game Tests<em> system/game</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Game software test.</p>
-<h3>Smp Tests<em> pts/smp</em>
+<h3>Opencl Tests<em> system/opencl</em>
 </h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing smp.</p>
-<h3>Cuda Tests<em> pts/cuda</em>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing opencl.</p>
+<h3>Cuda Tests<em> system/cuda</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cuda.</p>
-<h3>Openmp Tests<em> pts/openmp</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmp.</p>
-<h3>Python Tests<em> pts/python</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing python.</p>
-<h3>Go Tests<em> pts/go</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing go.</p>
-<h3>Mpi Tests<em> pts/mpi</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.</p>
-<h3>Vdpau Tests<em> pts/vdpau</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing vdpau.</p>
-<h3>Video Tests<em> pts/video</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing video.</p>
-<h3>Responsiveness Tests<em> pts/responsiveness</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing responsiveness.</p>
-<h3>Openmpi Tests<em> pts/openmpi</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmpi.</p>
 <h3>All WINDOWS Tests<em> windows/all</em>
 </h3>
 <p>This is a collection of all test profiles found within the specified OpenBenchmarking.org repository.</p>
@@ -67,9 +67,6 @@ This option will permanently remove a installed test by the Phoronix Test Suite.
 
 
 ## Testing
-#### auto-compare
-This option will autonomously determine the most relevant test(s) to run for any selected sub-system(s). The tests to run are determined via OpenBenchmarking.org integration with the global results pool. Related test results from OpenBenchmarking.org are also merged to provide a straight-forward and effective means of carrying out a system comparison. If wishing to find comparable results for any particular test profile(s), simply pass the test profile names as additional arguments to this command.
-
 #### benchmark [Test | Suite | OpenBenchmarking ID | Test Result] ...
 This option will install the selected test(s) (if needed) and will proceed to run the test(s). This option is equivalent to running phoronix-test-suite with the install option followed by the run option. Multiple arguments can be supplied to run additional tests at the same time and save the results into one file.
 
@@ -789,6 +786,10 @@ This is similar to the FORCE_TIMES_TO_RUN option but will only be used if the te
 
 When used in conjunction with FORCE_MIN_TIMES_TO_RUN, the override value will only be applied to test profiles where its average run-time length (in minutes) is less than the value specified by FORCE_MIN_TIMES_TO_RUN_CUTOFF.
 
+**FORCE_MIN_DURATION_PER_TEST**
+
+This is similar to FORCE_MIN_TIMES_TO_RUN but allows specifying a time (in minutes) that each test should be run for. Each test will loop at least until that amount of time has elapsed. This can be useful for short-running tests if wanting to ensure each test is run long enough to rule out system noise.
+
 **IGNORE_RUNS**
 
 IGNORE_RUNS can be passed a comma-separated list of runs to skip on each benchmark. For example, IGNORE_RUNS=1 would always drop the first run from being recorded.
@@ -994,18 +995,24 @@ This is a collection of test profiles found within the specified OpenBenchmarkin
 #### Cuda Tests pts/cuda
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cuda.
 
+#### Mpi Tests pts/mpi
+This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.
+
 #### Openmp Tests pts/openmp
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmp.
 
+#### Cloud Tests pts/cloud
+This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cloud.
+
+#### Docker Tests pts/docker
+This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing docker.
+
 #### Python Tests pts/python
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing python.
 
 #### Go Tests pts/go
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing go.
 
-#### Mpi Tests pts/mpi
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.
-
 #### Vdpau Tests pts/vdpau
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing vdpau.
 
@@ -1159,93 +1166,78 @@ This is a collection of test profiles found within the specified OpenBenchmarkin
 #### Smp Tests git/smp
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing smp.
 
-#### All PTS Tests pts/all
+#### All SYSTEM Tests system/all
 This is a collection of all test profiles found within the specified OpenBenchmarking.org repository.
 
-#### Installed Tests pts/installed
+#### Installed Tests system/installed
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository that are already installed on the system under test.
 
-#### Every PTS Test pts/everything
+#### Every SYSTEM Test system/everything
 This is a collection of every test profile found within the specified OpenBenchmarking.org repository, including unsupported tests.
 
-#### Linux Operating System Tests pts/linux
+#### Linux Operating System Tests system/linux
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Linux Operating System.
 
-#### Solaris Operating System Tests pts/solaris
+#### Solaris Operating System Tests system/solaris
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Solaris Operating System.
 
-#### BSD Operating System Tests pts/bsd
+#### BSD Operating System Tests system/bsd
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the BSD Operating System.
 
-#### MacOSX Operating System Tests pts/macosx
+#### MacOSX Operating System Tests system/macosx
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the MacOSX Operating System.
 
-#### Windows Operating System Tests pts/windows
+#### Windows Operating System Tests system/windows
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Windows Operating System.
 
-#### Hurd Operating System Tests pts/hurd
+#### Hurd Operating System Tests system/hurd
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Hurd Operating System.
 
-#### System Subsystem Tests pts/system
+#### System Subsystem Tests system/system
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the System sub-system.
 
-#### Processor Subsystem Tests pts/processor
+#### Processor Subsystem Tests system/processor
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Processor sub-system.
 
-#### Graphics Subsystem Tests pts/graphics
+#### Disk Subsystem Tests system/disk
+This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Disk sub-system.
+
+#### Graphics Subsystem Tests system/graphics
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Graphics sub-system.
 
-#### Other Subsystem Tests pts/other
+#### Memory Subsystem Tests system/memory
+This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Memory sub-system.
+
+#### Network Subsystem Tests system/network
+This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Network sub-system.
+
+#### Other Subsystem Tests system/other
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Other sub-system.
 
-#### Utility Tests pts/utility
+#### Utility Tests system/utility
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Utility software test.
 
-#### Simulator Tests pts/simulator
+#### Simulator Tests system/simulator
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Simulator software test.
 
-#### Scientific Tests pts/scientific
+#### Scientific Tests system/scientific
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Scientific software test.
 
-#### Benchmark Tests pts/benchmark
+#### Benchmark Tests system/benchmark
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Benchmark software test.
 
-#### Application Tests pts/application
+#### Application Tests system/application
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Application software test.
 
-#### Game Tests pts/game
+#### Game Tests system/game
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Game software test.
 
-#### Smp Tests pts/smp
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing smp.
+#### Opencl Tests system/opencl
+This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing opencl.
 
-#### Cuda Tests pts/cuda
+#### Cuda Tests system/cuda
 This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cuda.
 
-#### Openmp Tests pts/openmp
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmp.
-
-#### Python Tests pts/python
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing python.
-
-#### Go Tests pts/go
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing go.
-
-#### Mpi Tests pts/mpi
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.
-
-#### Vdpau Tests pts/vdpau
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing vdpau.
-
-#### Video Tests pts/video
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing video.
-
-#### Responsiveness Tests pts/responsiveness
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing responsiveness.
-
-#### Openmpi Tests pts/openmpi
-This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmpi.
-
 #### All WINDOWS Tests windows/all
 This is a collection of all test profiles found within the specified OpenBenchmarking.org repository.
 
@@ -41,9 +41,6 @@
 </h3>
 <p>This option will permanently remove a installed test by the Phoronix Test Suite.</p>
 <h1>Testing</h1>
-<h3>auto-compare<em> </em>
-</h3>
-<p>This option will autonomously determine the most relevant test(s) to run for any selected sub-system(s). The tests to run are determined via OpenBenchmarking.org integration with the global results pool. Related test results from OpenBenchmarking.org are also merged to provide a straight-forward and effective means of carrying out a system comparison. If wishing to find comparable results for any particular test profile(s), simply pass the test profile names as additional arguments to this command.</p>
 <h3>benchmark<em> [Test | Suite | OpenBenchmarking ID | Test Result] ...</em>
 </h3>
 <p>This option will install the selected test(s) (if needed) and will proceed to run the test(s). This option is equivalent to running phoronix-test-suite with the install option followed by the run option. Multiple arguments can be supplied to run additional tests at the same time and save the results into one file.</p>
@@ -61,6 +61,8 @@
 <p>This is similar to the FORCE_TIMES_TO_RUN option but will only be used if the test profile's run count is less than this defined value.</p>
 <p><strong>FORCE_MIN_TIMES_TO_RUN_CUTOFF</strong></p>
 <p>When used in conjunction with FORCE_MIN_TIMES_TO_RUN, the override value will only be applied to test profiles where its average run-time length (in minutes) is less than the value specified by FORCE_MIN_TIMES_TO_RUN_CUTOFF.</p>
+<p><strong>FORCE_MIN_DURATION_PER_TEST</strong></p>
+<p>This is similar to FORCE_MIN_TIMES_TO_RUN but allows specifying a time (in minutes) that each test should be run for. Each test will loop at least until that amount of time has elapsed. This can be useful for short-running tests if wanting to ensure each test is run long enough to rule out system noise.</p>
 <p><strong>IGNORE_RUNS</strong></p>
 <p>IGNORE_RUNS can be passed a comma-separated list of runs to skip on each benchmark. For example, IGNORE_RUNS=1 would always drop the first run from being recorded.</p>
 <p><strong>NO_FILE_HASH_CHECKS</strong></p>
@@ -69,18 +69,24 @@
 <h3>Cuda Tests<em> pts/cuda</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cuda.</p>
+<h3>Mpi Tests<em> pts/mpi</em>
+</h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.</p>
 <h3>Openmp Tests<em> pts/openmp</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmp.</p>
+<h3>Cloud Tests<em> pts/cloud</em>
+</h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cloud.</p>
+<h3>Docker Tests<em> pts/docker</em>
+</h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing docker.</p>
 <h3>Python Tests<em> pts/python</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing python.</p>
 <h3>Go Tests<em> pts/go</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing go.</p>
-<h3>Mpi Tests<em> pts/mpi</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.</p>
 <h3>Vdpau Tests<em> pts/vdpau</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing vdpau.</p>
@@ -234,93 +240,78 @@
 <h3>Smp Tests<em> git/smp</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing smp.</p>
-<h3>All PTS Tests<em> pts/all</em>
+<h3>All SYSTEM Tests<em> system/all</em>
 </h3>
 <p>This is a collection of all test profiles found within the specified OpenBenchmarking.org repository.</p>
-<h3>Installed Tests<em> pts/installed</em>
+<h3>Installed Tests<em> system/installed</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository that are already installed on the system under test.</p>
-<h3>Every PTS Test<em> pts/everything</em>
+<h3>Every SYSTEM Test<em> system/everything</em>
 </h3>
 <p>This is a collection of every test profile found within the specified OpenBenchmarking.org repository, including unsupported tests.</p>
-<h3>Linux Operating System Tests<em> pts/linux</em>
+<h3>Linux Operating System Tests<em> system/linux</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Linux Operating System.</p>
-<h3>Solaris Operating System Tests<em> pts/solaris</em>
+<h3>Solaris Operating System Tests<em> system/solaris</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Solaris Operating System.</p>
-<h3>BSD Operating System Tests<em> pts/bsd</em>
+<h3>BSD Operating System Tests<em> system/bsd</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the BSD Operating System.</p>
-<h3>MacOSX Operating System Tests<em> pts/macosx</em>
+<h3>MacOSX Operating System Tests<em> system/macosx</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the MacOSX Operating System.</p>
-<h3>Windows Operating System Tests<em> pts/windows</em>
+<h3>Windows Operating System Tests<em> system/windows</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Windows Operating System.</p>
-<h3>Hurd Operating System Tests<em> pts/hurd</em>
+<h3>Hurd Operating System Tests<em> system/hurd</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being compatible with the Hurd Operating System.</p>
-<h3>System Subsystem Tests<em> pts/system</em>
+<h3>System Subsystem Tests<em> system/system</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the System sub-system.</p>
-<h3>Processor Subsystem Tests<em> pts/processor</em>
+<h3>Processor Subsystem Tests<em> system/processor</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Processor sub-system.</p>
-<h3>Graphics Subsystem Tests<em> pts/graphics</em>
+<h3>Disk Subsystem Tests<em> system/disk</em>
 </h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Disk sub-system.</p>
+<h3>Graphics Subsystem Tests<em> system/graphics</em>
+</h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Graphics sub-system.</p>
-<h3>Other Subsystem Tests<em> pts/other</em>
+<h3>Memory Subsystem Tests<em> system/memory</em>
 </h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Memory sub-system.</p>
+<h3>Network Subsystem Tests<em> system/network</em>
+</h3>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Network sub-system.</p>
+<h3>Other Subsystem Tests<em> system/other</em>
+</h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a test of the Other sub-system.</p>
-<h3>Utility Tests<em> pts/utility</em>
+<h3>Utility Tests<em> system/utility</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Utility software test.</p>
-<h3>Simulator Tests<em> pts/simulator</em>
+<h3>Simulator Tests<em> system/simulator</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Simulator software test.</p>
-<h3>Scientific Tests<em> pts/scientific</em>
+<h3>Scientific Tests<em> system/scientific</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Scientific software test.</p>
-<h3>Benchmark Tests<em> pts/benchmark</em>
+<h3>Benchmark Tests<em> system/benchmark</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Benchmark software test.</p>
-<h3>Application Tests<em> pts/application</em>
+<h3>Application Tests<em> system/application</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Application software test.</p>
-<h3>Game Tests<em> pts/game</em>
+<h3>Game Tests<em> system/game</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified as being a Game software test.</p>
-<h3>Smp Tests<em> pts/smp</em>
+<h3>Opencl Tests<em> system/opencl</em>
 </h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing smp.</p>
-<h3>Cuda Tests<em> pts/cuda</em>
+<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing opencl.</p>
+<h3>Cuda Tests<em> system/cuda</em>
 </h3>
 <p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing cuda.</p>
-<h3>Openmp Tests<em> pts/openmp</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmp.</p>
-<h3>Python Tests<em> pts/python</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing python.</p>
-<h3>Go Tests<em> pts/go</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing go.</p>
-<h3>Mpi Tests<em> pts/mpi</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing mpi.</p>
-<h3>Vdpau Tests<em> pts/vdpau</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing vdpau.</p>
-<h3>Video Tests<em> pts/video</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing video.</p>
-<h3>Responsiveness Tests<em> pts/responsiveness</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing responsiveness.</p>
-<h3>Openmpi Tests<em> pts/openmpi</em>
-</h3>
-<p>This is a collection of test profiles found within the specified OpenBenchmarking.org repository where the test profile is specified via an internal tag as testing openmpi.</p>
 <h3>All WINDOWS Tests<em> windows/all</em>
 </h3>
 <p>This is a collection of all test profiles found within the specified OpenBenchmarking.org repository.</p>
@@ -28,8 +28,14 @@ then
 fi
 
 # Full path to root directory of the actual Phoronix Test Suite code
-export PTS_DIR=$(readlink -f `dirname $0`)
-#export PTS_DIR=`pwd`
+if [ -d /Applications ]
+then
+	# macOS has problems with the readlink code
+	export PTS_DIR=`pwd`
+else
+	export PTS_DIR=$(readlink -f `dirname $0`)
+fi
 
 export PTS_MODE="CLIENT"
 if which realpath >/dev/null 2>&1 ;
 then
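For context on the hunk above: macOS historically ships a readlink without the -f flag, which is presumably why the launcher falls back to `pwd` there (the diff's own comment says "macOS has problems with the readlink code"). A minimal portable sketch for resolving a script's own directory without readlink -f, offered as an illustrative alternative rather than code from this repository:

```sh
# Resolve the directory containing this script without readlink -f:
# cd into the script's dirname in a subshell and let pwd canonicalize it.
PTS_DIR=$(cd "$(dirname "$0")" && pwd)
export PTS_DIR
```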
@@ -255,7 +261,11 @@ then
 cat <<'EOT'
 
 To run the Phoronix Test Suite locally you must first change directories to phoronix-test-suite/
-or install the program using the install-sh script. For support visit: https://www.phoronix-test-suite.com/
+or install the program using the install-sh script.
+
+For support visit:
+https://www.phoronix-test-suite.com/
+https://github.com/phoronix-test-suite/phoronix-test-suite
 
 EOT
 exit
@@ -1,206 +0,0 @@
-<?php
-
-/*
-	Phoronix Test Suite
-	URLs: http://www.phoronix.com, http://www.phoronix-test-suite.com/
-	Copyright (C) 2012 - 2018, Phoronix Media
-	Copyright (C) 2012 - 2018, Michael Larabel
-
-	This program is free software; you can redistribute it and/or modify
-	it under the terms of the GNU General Public License as published by
-	the Free Software Foundation; either version 3 of the License, or
-	(at your option) any later version.
-
-	This program is distributed in the hope that it will be useful,
-	but WITHOUT ANY WARRANTY; without even the implied warranty of
-	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-	GNU General Public License for more details.
-
-	You should have received a copy of the GNU General Public License
-	along with this program. If not, see <http://www.gnu.org/licenses/>.
-*/
-
-class auto_compare implements pts_option_interface
-{
-	const doc_section = 'Testing';
-	const doc_description = 'This option will autonomously determine the most relevant test(s) to run for any selected sub-system(s). The tests to run are determined via OpenBenchmarking.org integration with the global results pool. Related test results from OpenBenchmarking.org are also merged to provide a straight-forward and effective means of carrying out a system comparison. If wishing to find comparable results for any particular test profile(s), simply pass the test profile names as additional arguments to this command.';
-
-	public static function run($r)
-	{
-		$compare_tests = array();
-		$compare_subsystems = array();
-		foreach($r as $test_object)
-		{
-			$test_object = pts_types::identifier_to_object($test_object);
-
-			if($test_object instanceof pts_test_profile)
-			{
-				$compare_tests[] = $test_object->get_identifier(false);
-
-				if(!isset($compare_subsystems[$test_object->get_test_hardware_type()]))
-				{
-					$compare_subsystems[$test_object->get_test_hardware_type()] = 1;
-				}
-				else
-				{
-					$compare_subsystems[$test_object->get_test_hardware_type()] += 1;
-				}
-			}
-		}
-
-		if(empty($compare_tests))
-		{
-			$subsystem_under_test = pts_user_io::prompt_text_menu('Sub-System To Test', array('Processor', 'Graphics', 'Disk'));
-		}
-		else
-		{
-			arsort($compare_subsystems);
-			$compare_subsystems = array_keys($compare_subsystems);
-			$subsystem_under_test = array_shift($compare_subsystems);
-		}
-
-		$system_info = array_merge(phodevi::system_hardware(false), phodevi::system_software(false));
-		$to_include = array();
-		$to_exclude = array();
-
-		if(isset($system_info[$subsystem_under_test]))
-		{
-			$compare_component = $system_info[$subsystem_under_test];
-		}
-		else
-		{
-			return;
-		}
-
-		switch($subsystem_under_test)
-		{
-			case 'Processor':
-				self::system_component_to_format($system_info, $to_include, array('OS', 'Compiler', 'Kernel', 'Motherboard'), true);
-				break;
-			case 'Graphics':
-				self::system_component_to_format($system_info, $to_include, array('OS', 'Display Driver', 'OpenGL', 'Processor', 'Kernel', 'Desktop'), true);
-				break;
-			case 'OS':
-				self::system_component_to_format($system_info, $to_include, array('Processor', 'Motherboard', 'Graphics', 'Disk'), true);
-				self::system_component_to_format($system_info, $to_exclude, array('OS'));
-				break;
-			case 'Disk':
-				self::system_component_to_format($system_info, $to_include, array('Processor', 'OS', 'Chipset', 'Motherboard', 'Kernel'), true);
-				break;
-		}
-
-		$payload = array(
-			'subsystem_under_test' => $subsystem_under_test,
-			'component_under_test' => $compare_component,
-			'include_components' => implode(',', $to_include),
-			'exclude_components' => implode(',', $to_exclude),
-			'include_tests' => implode(',', $compare_tests),
-			);
-
-		echo PHP_EOL . 'Querying test data from OpenBenchmarking.org...' . PHP_EOL;
-		$json = pts_openbenchmarking::make_openbenchmarking_request('auto_generate_comparison', $payload);
-		$json = json_decode($json, true);
-
-		if(isset($json['auto_compare']['public_ids']) && isset($json['auto_compare']['count']) && $json['auto_compare']['count'] > 0)
-		{
-			echo 'Found ' . $json['auto_compare']['count'] . ' comparable results on OpenBenchmarking.org with a ' . $json['auto_compare']['accuracy'] . '% accuracy.' . PHP_EOL;
-
-			$compare_results = array();
-
-			foreach($json['auto_compare']['public_ids'] as $public_id)
-			{
-				$ret = pts_openbenchmarking::clone_openbenchmarking_result($public_id);
-				if($ret)
-				{
-					$result_file = new pts_result_file($public_id);
-					$result_objects = $result_file->get_result_objects();
-
-					foreach($result_objects as $i => &$result_object)
-					{
-						if(!empty($compare_tests))
-						{
-							if(!in_array($result_object->test_profile->get_identifier(false), $compare_tests))
-							{
-								unset($result_objects[$i]);
-							}
-						}
-						else if($result_object->test_profile->get_test_hardware_type() != $subsystem_under_test)
-						{
-							unset($result_objects[$i]);
-						}
-					}
-
-					if(count($result_objects) == 0)
-					{
-						continue;
-					}
-					$result_file->override_result_objects($result_objects);
-					pts_client::save_test_result($result_file->get_file_location(), $result_file->get_xml());
-					$compare_results[] = $public_id;
-				}
-			}
-
-			if(count($compare_results) > 0)
-			{
-				$result_file = new pts_result_file(null, true);
-				$result_file->merge($compare_results);
-				$result_objects = $result_file->get_result_objects();
-				$system_count = $result_file->get_system_count();
-				$result_count = count($result_objects);
-				$result_match_count = array();
-
-				if($result_count > 3)
-				{
-					foreach($result_objects as $i => &$result_object)
-					{
-						$result_match_count[$i] = $result_object->test_result_buffer->get_count();
-					}
-
-					arsort($result_match_count);
-					$biggest_size = pts_arrays::first_element($result_match_count);
-					if($biggest_size == $system_count || $biggest_size > 3)
-					{
-						foreach($result_match_count as $key => $value)
-						{
-							if($value < 2)
-							{
-								unset($result_objects[$key]);
-							}
-						}
-					}
-					$result_file->override_result_objects($result_objects);
-				}
-
-				pts_client::save_test_result('auto-comparison/composite.xml', $result_file->get_xml());
-			}
-		}
-
-		pts_test_installer::standard_install(array('auto-comparison'));
-		$test_run_manager = new pts_test_run_manager();
-		$test_run_manager->standard_run(array('auto-comparison'));
-	}
-	protected static function system_component_to_format(&$system_info, &$to_array, $component_types, $allow_trim_extra = false)
-	{
-		foreach($component_types as $component_type)
-		{
-			if(isset($system_info[$component_type]))
-			{
-				$value = pts_strings::trim_search_query($system_info[$component_type]);
-
-				if($value != null)
-				{
-					if($allow_trim_extra && !isset($to_array[2]))
-					{
-						$value_r = explode(' ', str_replace('-', ' ', $value));
-						array_pop($value_r);
-						$to_array[] = $component_type . ':' . implode(' ', $value_r);
-					}
-
-					$to_array[] = $component_type . ':' . $value;
-				}
-			}
-		}
-	}
-}
-
-?>
19  pts-core/external-test-dependencies/scripts/install-centos-packages.sh  Executable file
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Fedora / Red Hat package installation
+if [ `whoami` = "root" ] && [ ! -w /usr/bin/sudo ]; then
+	yum -y install $*
+elif [ -x /usr/bin/dnf ]; then
+	sudo dnf -y --skip-broken install $*
+elif [ `whoami` = "ec2-user" ]; then
+	sudo yum -y --skip-broken install $*
+else
+	echo "Please enter your SUDO password below:" 1>&2
+	read -s -p "Password: " passwd
+	if ! echo $passwd | sudo -S -p '' yum -y --skip-broken install $*; then
+		echo "Please enter your ROOT password below:" 1>&2
+		su root -c "yum -y --skip-broken install $*"
+	fi
+fi
+
+exit
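A quick usage sketch for the new helper script above. In normal operation pts-core invokes such scripts when resolving external test dependencies, but it can presumably also be run by hand with package names as arguments (the package names here are examples):

```sh
# Hypothetical manual invocation with example package names;
# the script picks dnf/yum and sudo/su based on the environment.
sh pts-core/external-test-dependencies/scripts/install-centos-packages.sh gcc make nasm
```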
@@ -319,5 +319,17 @@
 		<GenericName>libxml2</GenericName>
 		<PackageName>libxml2</PackageName>
 	</Package>
+	<Package>
+		<GenericName>uuid</GenericName>
+		<PackageName>uuid</PackageName>
+	</Package>
+	<Package>
+		<GenericName>clang</GenericName>
+		<PackageName>clang</PackageName>
+	</Package>
+	<Package>
+		<GenericName>gflags</GenericName>
+		<PackageName>gflags</PackageName>
+	</Package>
 </ExternalDependencies>
 </PhoronixTestSuite>
363  pts-core/external-test-dependencies/xml/centos-packages.xml  Normal file
@@ -0,0 +1,363 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="xsl/pts-exdep-viewer.xsl" ?>
<PhoronixTestSuite>
	<ExternalDependencies>
		<Information>
			<Name>CentOS</Name>
			<Aliases></Aliases>
		</Information>
		<Package>
			<GenericName>common-dependencies</GenericName>
			<PackageName>unzip bzip2</PackageName>
			<FileCheck>unzip, bzip2</FileCheck>
		</Package>
		<Package>
			<GenericName>32bit-compatibility</GenericName>
			<PackageName>glibc.i686 libstdc++.i686 libX11.i686 libXext.i686 libXrandr.i686 libXinerama.i686 mesa-libGL.i686 openal-soft.i686</PackageName>
			<ArchitectureSpecific>x86_64</ArchitectureSpecific>
			<FileCheck>/lib/i686, /usr/lib/libstdc++.so.6</FileCheck>
		</Package>
		<Package>
			<GenericName>gtk-development</GenericName>
			<PackageName>gtk2-devel</PackageName>
		</Package>
		<Package>
			<GenericName>sdl-development</GenericName>
			<PackageName>SDL-devel SDL_gfx-devel SDL_image-devel SDL_net-devel SDL_sound-devel SDL_ttf-devel</PackageName>
		</Package>
		<Package>
			<GenericName>sdl2-development</GenericName>
			<PackageName>SDL2 SDL2_image SDL2_mixer SDL2_ttf SDL2-devel SDL2_image-devel</PackageName>
		</Package>
		<Package>
			<GenericName>tiff</GenericName>
			<PackageName>libtiff-devel</PackageName>
		</Package>
		<Package>
			<GenericName>glut</GenericName>
			<PackageName>freeglut-devel</PackageName>
		</Package>
		<Package>
			<GenericName>nasm</GenericName>
			<PackageName>nasm</PackageName>
		</Package>
		<Package>
			<GenericName>expat</GenericName>
			<PackageName>expat</PackageName>
		</Package>
		<Package>
			<GenericName>vulkan-development</GenericName>
			<PackageName>vulkan-devel</PackageName>
		</Package>
		<Package>
			<GenericName>openssl-development</GenericName>
			<PackageName>openssl-devel</PackageName>
		</Package>
		<Package>
			<GenericName>csh</GenericName>
			<PackageName>tcsh</PackageName>
		</Package>
		<Package>
			<GenericName>cairo-development</GenericName>
			<PackageName>cairo-devel</PackageName>
		</Package>
		<Package>
			<GenericName>libpng-development</GenericName>
			<PackageName>libpng-devel</PackageName>
		</Package>
		<Package>
			<GenericName>build-utilities</GenericName>
			<PackageName>gcc gcc-c++ make autoconf automake patch expat-devel</PackageName>
			<FileCheck>gcc, c++</FileCheck>
		</Package>
		<Package>
			<GenericName>bison</GenericName>
			<PackageName>bison</PackageName>
		</Package>
		<Package>
			<GenericName>flex</GenericName>
			<PackageName>flex</PackageName>
		</Package>
		<Package>
			<GenericName>imlib2-development</GenericName>
			<PackageName>imlib2-devel</PackageName>
		</Package>
		<Package>
			<GenericName>java</GenericName>
			<PackageName>java-openjdk</PackageName>
		</Package>
		<Package>
			<GenericName>maven</GenericName>
			<PackageName>maven</PackageName>
		</Package>
		<Package>
			<GenericName>portaudio-development</GenericName>
			<PackageName>portaudio-devel</PackageName>
		</Package>
		<Package>
			<GenericName>fortran-compiler</GenericName>
			<PackageName>gcc-gfortran</PackageName>
		</Package>
		<Package>
			<GenericName>glew</GenericName>
			<PackageName>glew-devel</PackageName>
		</Package>
		<Package>
			<GenericName>freeimage</GenericName>
			<PackageName>freeimage freeimage-devel</PackageName>
		</Package>
		<Package>
			<GenericName>scons</GenericName>
			<PackageName>scons</PackageName>
		</Package>
		<Package>
			<GenericName>zlib-development</GenericName>
			<PackageName>zlib-devel</PackageName>
		</Package>
		<Package>
			<GenericName>jpeg-development</GenericName>
			<PackageName>libjpeg-devel</PackageName>
		</Package>
		<Package>
			<GenericName>libaio-development</GenericName>
			<PackageName>libaio-devel</PackageName>
		</Package>
		<Package>
			<GenericName>bc</GenericName>
			<PackageName>bc</PackageName>
		</Package>
		<Package>
			<GenericName>pcre</GenericName>
			<PackageName>pcre-devel</PackageName>
		</Package>
		<Package>
			<GenericName>perl</GenericName>
			<PackageName>perl perl-SDL</PackageName>
		</Package>
		<Package>
			<GenericName>xorg-video</GenericName>
			<PackageName>libXv-devel libXvMC-devel libvdpau-devel</PackageName>
		</Package>
		<Package>
			<GenericName>libstdcpp5</GenericName>
			<PackageName>compat-libstdc++-33</PackageName>
		</Package>
		<Package>
			<GenericName>p7zip</GenericName>
			<PackageName>p7zip</PackageName>
		</Package>
		<Package>
			<GenericName>openal-development</GenericName>
			<PackageName>openal openal-devel</PackageName>
		</Package>
		<Package>
			<GenericName>vorbis-development</GenericName>
			<PackageName>taglib-devel libvorbis-devel</PackageName>
		</Package>
		<Package>
			<GenericName>jam</GenericName>
			<PackageName>jam</PackageName>
		</Package>
		<Package>
			<GenericName>qt4-development</GenericName>
			<PackageName>qt4-devel</PackageName>
		</Package>
		<Package>
			<GenericName>qt5-development</GenericName>
			<PackageName>qt5-qtbase-devel qt5-devel</PackageName>
		</Package>
		<Package>
			<GenericName>autoconf</GenericName>
			<PackageName>autoconf</PackageName>
		</Package>
		<Package>
			<GenericName>libtool</GenericName>
			<PackageName>libtool</PackageName>
		</Package>
		<Package>
			<GenericName>libevent</GenericName>
			<PackageName>libevent-devel</PackageName>
		</Package>
		<Package>
			<GenericName>ncurses-development</GenericName>
			<PackageName>ncurses-devel</PackageName>
		</Package>
		<Package>
			<GenericName>popt</GenericName>
			<PackageName>popt-devel</PackageName>
		</Package>
		<Package>
			<GenericName>meson</GenericName>
			<PackageName> </PackageName>
		</Package>
		<Package>
			<GenericName>numa-development</GenericName>
			<PackageName>numactl-devel</PackageName>
		</Package>
		<Package>
			<GenericName>curl</GenericName>
			<PackageName>libcurl curl libcurl-devel curl-devel</PackageName>
		</Package>
		<Package>
			<GenericName>smartmontools</GenericName>
			<PackageName>smartmontools</PackageName>
		</Package>
		<Package>
			<GenericName>fftw3-development</GenericName>
			<PackageName>fftw3-devel</PackageName>
		</Package>
		<Package>
			<GenericName>blas-development</GenericName>
			<PackageName>blas</PackageName>
		</Package>
		<Package>
			<GenericName>lapack-development</GenericName>
			<PackageName> </PackageName>
		</Package>
		<Package>
			<GenericName>cmake</GenericName>
			<PackageName>cmake</PackageName>
		</Package>
		<Package>
			<GenericName>boost-development</GenericName>
			<PackageName>boost-devel boost-thread</PackageName>
		</Package>
		<Package>
			<GenericName>rust</GenericName>
			<PackageName>rust cargo</PackageName>
		</Package>
		<Package>
			<GenericName>bzip2-development</GenericName>
			<PackageName>bzip2-devel</PackageName>
		</Package>
		<Package>
			<GenericName>tcl</GenericName>
			<PackageName>tcl</PackageName>
		</Package>
		<Package>
			<GenericName>glibc-development</GenericName>
			<PackageName>glibc-devel</PackageName>
		</Package>
		<Package>
			<GenericName>python</GenericName>
			<PackageName>python python3 python3-pip</PackageName>
		</Package>
		<Package>
			<GenericName>atlas-development</GenericName>
			<PackageName>atlas-devel</PackageName>
		</Package>
		<Package>
			<GenericName>openmpi-development</GenericName>
			<PackageName>openmpi-devel openmpi</PackageName>
			<FileCheck>openmpi/lib/libmpi.so</FileCheck>
		</Package>
		<Package>
			<GenericName>yasm</GenericName>
			<PackageName> </PackageName>
		</Package>
		<Package>
			<GenericName>lib3ds</GenericName>
			<PackageName>lib3ds-devel</PackageName>
		</Package>
		<Package>
			<GenericName>gmp-library</GenericName>
			<PackageName>gmp-devel</PackageName>
		</Package>
		<Package>
			<GenericName>subversion</GenericName>
			<PackageName>subversion</PackageName>
		</Package>
		<Package>
			<GenericName>git</GenericName>
			<PackageName>git</PackageName>
		</Package>
		<Package>
			<GenericName>superlu</GenericName>
			<PackageName>superlu-devel</PackageName>
		</Package>
		<Package>
			<GenericName>suitesparse</GenericName>
			<PackageName>suitesparse-devel</PackageName>
		</Package>
		<Package>
			<GenericName>tinyxml</GenericName>
			<PackageName>tinyxml-devel</PackageName>
		</Package>
		<Package>
			<GenericName>opencl</GenericName>
			<PackageName> </PackageName>
		</Package>
		<Package>
			<GenericName>attr</GenericName>
			<PackageName>attr</PackageName>
		</Package>
		<Package>
			<GenericName>httpd</GenericName>
			<PackageName>httpd</PackageName>
		</Package>
		<Package>
			<GenericName>golang</GenericName>
			<PackageName>golang</PackageName>
		</Package>
		<Package>
			<GenericName>redis-server</GenericName>
			<PackageName>redis</PackageName>
		</Package>
		<Package>
			<GenericName>opencv</GenericName>
			<PackageName>opencv-devel</PackageName>
		</Package>
		<Package>
			<GenericName>perl-digest-md5</GenericName>
			<PackageName>perl-Digest-MD5</PackageName>
		</Package>
		<Package>
			<GenericName>python-scipy</GenericName>
			<PackageName>scipy</PackageName>
		</Package>
		<Package>
			<GenericName>python-sklearn</GenericName>
			<PackageName>python-scikit-learn</PackageName>
		</Package>
		<Package>
			<GenericName>V8</GenericName>
			<PackageName>v8</PackageName>
		</Package>
		<Package>
			<GenericName>ruby</GenericName>
			<PackageName>ruby</PackageName>
		</Package>
		<Package>
			<GenericName>wine</GenericName>
			<PackageName>wine</PackageName>
		</Package>
		<Package>
			<GenericName>mongodb</GenericName>
			<PackageName>mongodb</PackageName>
		</Package>
		<Package>
			<GenericName>node-npm</GenericName>
			<PackageName>npm</PackageName>
		</Package>
		<Package>
			<GenericName>hdf5</GenericName>
			<PackageName>hdf5-devel</PackageName>
		</Package>
		<Package>
			<GenericName>libxml2</GenericName>
			<PackageName>libxml2-devel</PackageName>
		</Package>
		<Package>
			<GenericName>uuid</GenericName>
			<PackageName>uuid-devel</PackageName>
		</Package>
		<Package>
			<GenericName>clang</GenericName>
			<PackageName>clang</PackageName>
		</Package>
		<Package>
			<GenericName>gflags</GenericName>
			<PackageName>gflags-devel</PackageName>
		</Package>
	</ExternalDependencies>
</PhoronixTestSuite>
@@ -4,7 +4,7 @@
	<ExternalDependencies>
		<Information>
			<Name>Fedora</Name>
			<Aliases>Moblin, MeeGo, Amazon, Red Hat Enterprise, Red Hat Enterprise Server, Scientific, ScientificSL, CentOS, ClearOS, ClearOS Core Server, Oracle Server, OLPC</Aliases>
			<Aliases>Moblin, MeeGo, Amazon, Red Hat Enterprise, Red Hat Enterprise Server, Scientific, ScientificSL, ClearOS, ClearOS Core Server, Oracle Server, OLPC</Aliases>
			<PackageManager>yum</PackageManager>
		</Information>
		<Package>
@@ -348,5 +348,17 @@
			<GenericName>libxml2</GenericName>
			<PackageName>libxml2-devel</PackageName>
		</Package>
		<Package>
			<GenericName>uuid</GenericName>
			<PackageName>uuid-devel</PackageName>
		</Package>
		<Package>
			<GenericName>clang</GenericName>
			<PackageName>clang</PackageName>
		</Package>
		<Package>
			<GenericName>gflags</GenericName>
			<PackageName>gflags-devel</PackageName>
		</Package>
	</ExternalDependencies>
</PhoronixTestSuite>
@@ -548,5 +548,23 @@
			<PossibleNames>libxml2</PossibleNames>
			<FileCheck>libxml2/libxml/parser.h</FileCheck>
		</Package>
		<Package>
			<GenericName>uuid</GenericName>
			<Title>UUID</Title>
			<PossibleNames>uuid-dev, libuuid-devel</PossibleNames>
			<FileCheck>uuid/uuid.h</FileCheck>
		</Package>
		<Package>
			<GenericName>clang</GenericName>
			<Title>LLVM Clang</Title>
			<PossibleNames>clang</PossibleNames>
			<FileCheck>clang</FileCheck>
		</Package>
		<Package>
			<GenericName>gflags</GenericName>
			<Title>GFlags</Title>
			<PossibleNames>gflags</PossibleNames>
			<FileCheck>gflags/gflags.h</FileCheck>
		</Package>
	</ExternalDependencies>
</PhoronixTestSuite>
@@ -332,5 +332,13 @@
			<GenericName>libxml2</GenericName>
			<PackageName>dev-libs/libxml2</PackageName>
		</Package>
		<Package>
			<GenericName>uuid</GenericName>
			<PackageName>sys-libs/libuuid</PackageName>
		</Package>
		<Package>
			<GenericName>gflags</GenericName>
			<PackageName>dev-cpp/gflags</PackageName>
		</Package>
	</ExternalDependencies>
</PhoronixTestSuite>
@@ -61,6 +61,16 @@
			<PackageName>cmake</PackageName>
			<FileCheck>C:\cygwin64\bin\cmake.exe</FileCheck>
		</Package>
		<Package>
			<GenericName>tcl</GenericName>
			<PackageName>tcl tclsh</PackageName>
			<FileCheck>C:\cygwin64\bin\tclsh.exe</FileCheck>
		</Package>
		<Package>
			<GenericName>ruby</GenericName>
			<PackageName>ruby</PackageName>
			<FileCheck>C:\cygwin64\bin\ruby.exe</FileCheck>
		</Package>
		<Package>
			<GenericName>node-npm</GenericName>
			<PackageName>https://nodejs.org/dist/latest-v11.x/node-v11.5.0-x64.msi</PackageName>
@@ -334,5 +334,17 @@
			<GenericName>libxml2</GenericName>
			<PackageName>libxml2-devel</PackageName>
		</Package>
		<Package>
			<GenericName>uuid</GenericName>
			<PackageName>uuid-devel</PackageName>
		</Package>
		<Package>
			<GenericName>clang</GenericName>
			<PackageName>clang</PackageName>
		</Package>
		<Package>
			<GenericName>gflags</GenericName>
			<PackageName>gflags</PackageName>
		</Package>
	</ExternalDependencies>
</PhoronixTestSuite>
@@ -251,7 +251,7 @@
		</Package>
		<Package>
			<GenericName>tcl</GenericName>
			<PackageName>tcl</PackageName>
			<PackageName>tcl tclsh</PackageName>
		</Package>
		<Package>
			<GenericName>glibc-development</GenericName>
@@ -403,5 +403,17 @@
			<GenericName>libxml2</GenericName>
			<PackageName>libxml2-dev</PackageName>
		</Package>
		<Package>
			<GenericName>uuid</GenericName>
			<PackageName>uuid-dev</PackageName>
		</Package>
		<Package>
			<GenericName>clang</GenericName>
			<PackageName>clang</PackageName>
		</Package>
		<Package>
			<GenericName>gflags</GenericName>
			<PackageName>libgflags-dev</PackageName>
		</Package>
	</ExternalDependencies>
</PhoronixTestSuite>
@@ -141,7 +141,7 @@ class ob_auto_compare extends pts_module_interface
	protected static function request_compare_from_ob(&$result_object, $comparison_hash, $system_type)
	{
		$terminal_width = pts_client::terminal_width();
		if(!pts_network::internet_support_available() || self::$response_time > 15 || $terminal_width < 55)
		if(!pts_network::internet_support_available() || self::$response_time > 15 || $terminal_width < 52)
		{
			// If no network or OB requests are being slow...
			return false;
@@ -279,6 +279,12 @@ class ob_auto_compare extends pts_module_interface
			$results_at_pos[] = $this_result_pos - 1;
			$results_at_pos[] = $this_result_pos + 1;

			if($terminal_width <= 80)
			{
				// Try to shorten some components/identifiers when the terminal is narrow, to fit in more data
				$component = trim(str_replace(array('AMD', 'Intel', 'NVIDIA', 'Radeon', 'GeForce', '  '), '', $component));
			}

			foreach(array('-Core', ' with ') as $cutoff)
			{
				// On AMD product strings, trim the XX-Core from the string to save space...
@@ -49,7 +49,7 @@ class system_monitor extends pts_module_interface
	private static $sensor_monitoring_frequency = 1;
	private static $test_run_timer = 0;
	private static $perf_per_sensor_collection;
	private static $perf_per_sensor = null;
	private static $perf_per_sensor = false;

	public static function module_environmental_variables()
	{
@@ -108,6 +108,7 @@ class system_monitor extends pts_module_interface
			return pts_module::MODULE_UNLOAD;
		}

		putenv('FORCE_MIN_DURATION_PER_TEST=1'); // force each test to run at least one minute to ensure sufficient samples
	}

	public static function __pre_test_run($test_run_request)
@@ -381,38 +382,40 @@ class system_monitor extends pts_module_interface

	private static function enable_perf_per_sensor(&$sensor_parameters)
	{
		self::$perf_per_sensor = array();
		if(pts_module::read_variable('PERFORMANCE_PER_WATT'))
		{
			// We need to ensure the system power consumption is being tracked to get performance-per-Watt
			self::$perf_per_sensor = array('sys', 'power');
			if(empty($sensor_parameters['sys']['power']))
			{
				$sensor_parameters['sys']['power'] = array();
			}

			self::$perf_per_sensor_collection = array();
			self::$perf_per_sensor[] = array('sys', 'power');
			echo PHP_EOL . 'To Provide Performance-Per-Watt Outputs.' . PHP_EOL;
		}
		else if(pts_module::read_variable('PERFORMANCE_PER_SENSOR'))
		if(pts_module::read_variable('PERFORMANCE_PER_SENSOR'))
		{
			// We need to ensure the system power consumption is being tracked to get performance-per-(arbitrary sensor)
			$per_sensor = explode('.', pts_module::read_variable('PERFORMANCE_PER_SENSOR'));
			if(count($per_sensor) == 2)
			foreach(explode(',', pts_module::read_variable('PERFORMANCE_PER_SENSOR')) as $s)
			{
				self::$perf_per_sensor = $per_sensor;
				$per_sensor = explode('.', $s);
				if(count($per_sensor) == 2)
				{
					self::$perf_per_sensor[] = $per_sensor;
					echo PHP_EOL . 'To Provide Performance-Per-Sensor Outputs for ' . $per_sensor[0] . '.' . $per_sensor[1] . '.' . PHP_EOL;
				}
			}
			else
		}

		if(empty(self::$perf_per_sensor))
		{
			return false;
		}

		foreach(self::$perf_per_sensor as $i => $s)
		{
			if(empty($sensor_parameters[$s[0]][$s[1]]))
			{
				return false;
				$sensor_parameters[$s[0]][$s[1]] = array();
			}

			if(empty($sensor_parameters[self::$perf_per_sensor[0]][self::$perf_per_sensor[1]]))
			{
				$sensor_parameters[self::$perf_per_sensor[0]][self::$perf_per_sensor[1]] = array();
			}

			self::$perf_per_sensor_collection = array();
			echo PHP_EOL . 'To Provide Performance-Per-Sensor Outputs for ' . self::$perf_per_sensor[0] . '.' . self::$perf_per_sensor[1] . '.' . PHP_EOL;
			self::$perf_per_sensor_collection[$i] = array();
		}
	}
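The foreach over explode(',') above is what implements the ChangeLog item "Allow multiple PERFORMANCE_PER_SENSOR= to be delimited by comma." A minimal standalone sketch of just that parsing, with made-up sensor names:

<?php
// e.g. PERFORMANCE_PER_SENSOR=cpu.temp,sys.power (illustrative value)
$env_value = 'cpu.temp,sys.power,malformed-entry';
$perf_per_sensor = array();

foreach(explode(',', $env_value) as $s)
{
	$per_sensor = explode('.', $s);
	if(count($per_sensor) == 2)
	{
		// Only well-formed "type.sensor" pairs are tracked; anything else is silently skipped
		$perf_per_sensor[] = $per_sensor;
	}
}
print_r($perf_per_sensor); // [['cpu', 'temp'], ['sys', 'power']]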
@@ -602,51 +605,54 @@ class system_monitor extends pts_module_interface

	private static function process_perf_per_sensor(&$result_file)
	{
		$sensor_results = self::parse_monitor_log('logs/' . phodevi::sensor_identifier(self::$perf_per_sensor), self::$individual_test_run_offsets[phodevi::sensor_identifier(self::$perf_per_sensor)]);

		if(count($sensor_results) > 2 && self::$successful_test_run_request)
		foreach(self::$perf_per_sensor as $i => $s)
		{
			// Clone the object each time; writing directly to the original data would bork the used arguments on each succeeding pass through the loop
			$test_result = clone self::$successful_test_run_request;
			$unit = 'Watt';
			$sensor_results = self::parse_monitor_log('logs/' . phodevi::sensor_identifier($s), self::$individual_test_run_offsets[phodevi::sensor_identifier($s)]);

			$res_average = pts_math::arithmetic_mean($sensor_results);
			switch(phodevi::read_sensor_unit(self::$perf_per_sensor))
			if(count($sensor_results) > 2 && self::$successful_test_run_request)
			{
				case 'Milliwatts':
					$watt_average = $watt_average / 1000;
				case 'Watts':
					break;
				default:
					$unit = phodevi::read_sensor_unit(self::$perf_per_sensor);
			}
				// Clone the object each time; writing directly to the original data would bork the used arguments on each succeeding pass through the loop
				$test_result = clone self::$successful_test_run_request;
				$unit = 'Watt';

			if(!empty($unit) && $res_average > 0 && $test_result->test_profile->get_display_format() == 'BAR_GRAPH')
			{
				$test_result->test_profile->set_identifier(null);
				//$test_result->set_used_arguments_description(phodevi::sensor_name('sys.power') . ' Monitor');
				//$test_result->set_used_arguments(phodevi::sensor_name('sys.power') . ' ' . $test_result->get_arguments());
				$test_result->test_result_buffer = new pts_test_result_buffer();
				$res_average = pts_math::arithmetic_mean($sensor_results);
				switch(phodevi::read_sensor_unit($s))
				{
					case 'Milliwatts':
						$res_average = $res_average / 1000;
					case 'Watts':
						break;
					default:
						$unit = phodevi::read_sensor_unit($s);
				}

				if($test_result->test_profile->get_result_proportion() == 'HIB')
				if(!empty($unit) && $res_average > 0 && $test_result->test_profile->get_display_format() == 'BAR_GRAPH')
				{
					$test_result->test_profile->set_result_scale($test_result->test_profile->get_result_scale() . ' Per ' . $unit);
					$test_result->test_result_buffer->add_test_result(self::$result_identifier, pts_math::set_precision($test_result->active->get_result() / $res_average));
					$ro = $result_file->add_result_return_object($test_result);
					$test_result->test_profile->set_identifier(null);
					//$test_result->set_used_arguments_description(phodevi::sensor_name('sys.power') . ' Monitor');
					//$test_result->set_used_arguments(phodevi::sensor_name('sys.power') . ' ' . $test_result->get_arguments());
					$test_result->test_result_buffer = new pts_test_result_buffer();

					if($test_result->test_profile->get_result_proportion() == 'HIB')
					{
						$test_result->test_profile->set_result_scale($test_result->test_profile->get_result_scale() . ' Per ' . $unit);
						$test_result->test_result_buffer->add_test_result(self::$result_identifier, pts_math::set_precision($test_result->active->get_result() / $res_average));
						$ro = $result_file->add_result_return_object($test_result);
					}
					else if($test_result->test_profile->get_result_proportion() == 'LIB')
					{
						return; // with the below code not rendering nicely
						$test_result->test_profile->set_result_proportion('HIB');
						$test_result->test_profile->set_result_scale('Performance Per ' . $unit);
						$test_result->test_result_buffer->add_test_result(self::$result_identifier, pts_math::set_precision((1 / $test_result->active->get_result()) / $res_average));
						$ro = $result_file->add_result_return_object($test_result);
					}
					if($ro)
					{
						pts_client::$display->test_run_success_inline($ro);
					}
					self::$perf_per_sensor_collection[$i][] = $test_result->active->get_result();
				}
				else if($test_result->test_profile->get_result_proportion() == 'LIB')
				{
					return; // with the below code not rendering nicely
					$test_result->test_profile->set_result_proportion('HIB');
					$test_result->test_profile->set_result_scale('Performance Per ' . $unit);
					$test_result->test_result_buffer->add_test_result(self::$result_identifier, pts_math::set_precision((1 / $test_result->active->get_result()) / $res_average));
					$ro = $result_file->add_result_return_object($test_result);
				}
				if($ro)
				{
					pts_client::$display->test_run_success_inline($ro);
				}
				self::$perf_per_sensor_collection[] = $test_result->active->get_result();
			}
		}
	}
@@ -654,25 +660,28 @@
	// Saves average of perf-per-watt results to the result file.
	private static function process_perf_per_sensor_collection(&$test_run_manager)
	{
		if(is_array(self::$perf_per_sensor_collection) && count(self::$perf_per_sensor_collection) > 2)
		foreach(self::$perf_per_sensor as $i => $s)
		{
			// Performance per watt/sensor overall
			$unit = self::$perf_per_sensor == array('sys', 'power') ? 'Watt' : phodevi::read_sensor_unit(self::$perf_per_sensor);
			$avg = pts_math::geometric_mean(self::$perf_per_sensor_collection);
			$test_profile = new pts_test_profile();
			$test_result = new pts_test_result($test_profile);
			$test_result->test_profile->set_test_title('Meta Performance Per ' . $unit);
			$test_result->test_profile->set_identifier(null);
			$test_result->test_profile->set_version(null);
			$test_result->test_profile->set_result_proportion(null);
			$test_result->test_profile->set_display_format('BAR_GRAPH');
			$test_result->test_profile->set_result_scale('Performance Per ' . $unit);
			$test_result->test_profile->set_result_proportion('HIB');
			$test_result->set_used_arguments_description('Performance Per ' . $unit);
			$test_result->set_used_arguments('Per-Per-' . $unit);
			$test_result->test_result_buffer = new pts_test_result_buffer();
			$test_result->test_result_buffer->add_test_result(self::$result_identifier, pts_math::set_precision($avg, 4));
			$test_run_manager->result_file->add_result($test_result);
			if(is_array(self::$perf_per_sensor_collection[$i]) && count(self::$perf_per_sensor_collection[$i]) > 2)
			{
				// Performance per watt/sensor overall
				$unit = phodevi::read_sensor_unit($s);
				$avg = pts_math::geometric_mean(self::$perf_per_sensor_collection[$i]);
				$test_profile = new pts_test_profile();
				$test_result = new pts_test_result($test_profile);
				$test_result->test_profile->set_test_title('Meta Performance Per ' . $unit);
				$test_result->test_profile->set_identifier(null);
				$test_result->test_profile->set_version(null);
				$test_result->test_profile->set_result_proportion(null);
				$test_result->test_profile->set_display_format('BAR_GRAPH');
				$test_result->test_profile->set_result_scale('Performance Per ' . $unit);
				$test_result->test_profile->set_result_proportion('HIB');
				$test_result->set_used_arguments_description('Performance Per ' . $unit);
				$test_result->set_used_arguments('Per-Per-' . $unit);
				$test_result->test_result_buffer = new pts_test_result_buffer();
				$test_result->test_result_buffer->add_test_result(self::$result_identifier, pts_math::set_precision($avg, 4));
				$test_run_manager->result_file->add_result($test_result);
			}
		}
	}
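Each per-sensor collection above is condensed into the "Meta Performance Per ..." entry with pts_math::geometric_mean(). For reference, a self-contained sketch of the standard geometric-mean formulation; pts_math's actual implementation may differ, and the sample values are made up:

<?php
// Geometric mean: exp of the mean of the logs.
// All inputs must be positive, which holds for these performance ratios.
function geometric_mean_sketch(array $values)
{
	$log_sum = 0;
	foreach($values as $v)
	{
		$log_sum += log($v);
	}
	return exp($log_sum / count($values));
}

echo geometric_mean_sketch(array(2, 8)); // 4, whereas the arithmetic mean would be 5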
	private static function process_test_run_results(&$sensor, &$result_file)
@@ -437,7 +437,7 @@ class pts_test_execution
		// Dynamically increase run count if needed for statistical significance or other reasons
		$first_tr = array_slice($test_run_request->generated_result_buffers, 0, 1);
		$first_tr = array_shift($first_tr);
		$increase_run_count = $test_run_manager->increase_run_count_check($test_run_request, $first_tr->active, $defined_times_to_run); // XXX maybe check all generated buffers to see if to extend?
		$increase_run_count = $test_run_manager->increase_run_count_check($test_run_request, $first_tr->active, $defined_times_to_run, $time_test_start_actual); // XXX maybe check all generated buffers to see if to extend?

		if($increase_run_count === -1)
		{
@@ -110,7 +110,7 @@ class pts_test_run_manager
	{
		$this->skip_post_execution_options = true;
	}
	public function increase_run_count_check(&$test_run_request, &$active_result_buffer, $scheduled_times_to_run)
	public function increase_run_count_check(&$test_run_request, &$active_result_buffer, $scheduled_times_to_run, $time_test_started = 0)
	{
		// returning false here will not yield extra test run, returning true will yield additional test run, returning -1 will abort/not-save current test result

@@ -133,6 +133,16 @@
			}
		}

		if($time_test_started && ($min_duration = getenv('FORCE_MIN_DURATION_PER_TEST')) != false)
		{
			// FORCE_MIN_DURATION_PER_TEST if wanting to force a test to run at least for a given amount of time (minutes)
			$time_test_elapsed_so_far = microtime(true) - $time_test_started;
			if(is_numeric($min_duration) && $time_test_elapsed_so_far < ($min_duration * 60))
			{
				return true;
			}
		}

		// Compute average time taken per test run (in seconds)
		$avg_test_run_time = pts_math::arithmetic_mean($test_run_request->test_run_times);
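This guard is the core of the new FORCE_MIN_DURATION_PER_TEST option from the ChangeLog: as long as the elapsed wall time is below the requested number of minutes, another run is scheduled. A standalone sketch of the same logic; the 100-second "elapsed" figure is fabricated for illustration:

<?php
// FORCE_MIN_DURATION_PER_TEST is expressed in minutes
putenv('FORCE_MIN_DURATION_PER_TEST=3');
$time_test_started = microtime(true) - 100; // pretend the test began 100 seconds ago

if($time_test_started && ($min_duration = getenv('FORCE_MIN_DURATION_PER_TEST')) != false)
{
	$elapsed = microtime(true) - $time_test_started;
	if(is_numeric($min_duration) && $elapsed < ($min_duration * 60))
	{
		// Mirrors the "return true" above: keep running until 180 seconds have elapsed
		echo 'Only ' . round($elapsed) . 's elapsed; requesting another run.' . PHP_EOL;
	}
}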
@@ -532,7 +542,7 @@ class pts_test_run_manager
		$components = pts_result_file_analyzer::system_component_string_to_array(phodevi::system_hardware(true) . ', ' . phodevi::system_software(true));
		if($subsystem != null && isset($components[$subsystem]))
		{
			$subsystem_name = pts_strings::trim_search_query($components[$subsystem]);
			$subsystem_name = trim(pts_strings::trim_search_query($components[$subsystem]));

			if(!empty($subsystem_name) && phodevi::is_vendor_string($subsystem_name) && !in_array($subsystem_name, $subsystem_r))
			{
@@ -961,6 +971,12 @@ class pts_test_run_manager
			$notes['cpu-scaling-governor'] = $scaling_governor;
		}

		$cpu_microcode = phodevi::read_property('cpu', 'microcode-version');
		if($cpu_microcode)
		{
			$notes['cpu-microcode'] = $cpu_microcode;
		}

		// POWER processors have configurable SMT, 1-8 per core.
		$smt = phodevi::read_property('cpu', 'smt');
		if($smt)
@@ -184,6 +184,9 @@ class pts_tests
		$extra_vars['MESA_VK_WSI_PRESENT_MODE'] = 'immediate'; // https://cgit.freedesktop.org/mesa/mesa/commit/?id=a182adfd83ad00e326153b00a725a014e0359bf0
		$extra_vars['__GL_SYNC_TO_VBLANK'] = '0'; // Avoid sync to vblank with the NVIDIA binary drivers
		$extra_vars['CCACHE_DISABLE'] = '1'; // Should avoid ccache being used in compiler tests
		$extra_vars['OMPI_ALLOW_RUN_AS_ROOT'] = '1'; // Tests with mpirun should use --allow-run-as-root but otherwise this is the fallback
		$extra_vars['OMPI_ALLOW_RUN_AS_ROOT_CONFIRM'] = '1'; // Tests with mpirun should use --allow-run-as-root but otherwise this is the fallback

		foreach($test_profile->extended_test_profiles() as $i => $this_test_profile)
		{
@@ -1348,7 +1348,7 @@ class phodevi_gpu extends phodevi_device_interface
			$info = 'AMD ' . $info;
		}

		if(phodevi::is_linux() && ($vendor = phodevi_linux_parser::read_pci_subsystem_value('VGA compatible controller')) != null && stripos($info, $vendor) === false && (stripos($info, 'AMD') !== false || stripos($info, 'NVIDIA') !== false))
		if(phodevi::is_linux() && ($vendor = phodevi_linux_parser::read_pci_subsystem_value('VGA compatible controller')) != null && stripos($info, $vendor) === false && (stripos($info, 'AMD') !== false || stripos($info, 'NVIDIA') !== false || stripos($info, 'Intel') !== false))
		{
			$info = $vendor . ' ' . $info;
		}
@@ -1381,6 +1381,9 @@ class phodevi_gpu extends phodevi_device_interface
			// Last possible fallback...
			$info = str_replace(' FB', '', pts_file_io::file_get_contents('/sys/class/graphics/fb0/name'));
		}

		// Happens with Intel Iris Gallium3D
		$info = str_replace('Mesa ', ' ', $info);
		/*if(empty($info))
		{
			$info = 'Unknown';
@@ -47,6 +47,10 @@ class phodevi_monitor extends phodevi_device_interface
			{
				$monitor = null;
			}
			else
			{
				$monitor = array($monitor);
			}
		}
		else if(phodevi::is_nvidia_graphics() && isset(phodevi::$vfs->xorg_log))
		{
@@ -64,6 +64,17 @@ class sys_power extends phodevi_sensor
			}
			return true;
		}
		if(pts_client::executable_in_path('wattsup'))
		{
			$wattsup = self::watts_up_power_meter();

			if($wattsup > 0.5 && is_numeric($wattsup))
			{
				self::$wattsup_meter = true;
				return true;
			}
		}

		$test = self::sys_battery_power();
		if(is_numeric($test) && $test != -1)
		{
@@ -78,16 +89,6 @@ class sys_power extends phodevi_sensor
			return true;
		}

		if(pts_client::executable_in_path('wattsup'))
		{
			$wattsup = self::watts_up_power_meter();

			if($wattsup > 0.5 && is_numeric($wattsup))
			{
				self::$wattsup_meter = true;
				return true;
			}
		}
		if(is_readable('/sys/bus/i2c/drivers/ina3221x/0-0041/iio:device1/in_power0_input'))
		{
			$in_power0_input = pts_file_io::file_get_contents('/sys/bus/i2c/drivers/ina3221x/0-0041/iio:device1/in_power0_input');
@@ -362,30 +362,31 @@ abstract class pts_graph_core
	protected function get_paint_color($identifier, $check_branding = false)
	{
		// For now to try to improve the color handling of line graphs, first try to use a pre-defined pool of colors until falling back to the old color code once exhausted
		if(!isset(self::$color_cache[$identifier]) || $check_branding)
		if(!isset(self::$color_cache[$check_branding][$identifier]))
		{
			if(!empty(self::$c['color']['paint']))
			{
				self::$color_cache[$identifier] = array_shift(self::$c['color']['paint']);
				self::$color_cache[$check_branding][$identifier] = array_shift(self::$c['color']['paint']);
			}
			else
			{
				self::$color_cache[$identifier] = sprintf('#%06X', mt_rand(0, 0xFFFFFF));
				self::$color_cache[$check_branding][$identifier] = sprintf('#%06X', mt_rand(0, 0xFFFFFF));
			}

			if($check_branding)
			{
				self::$color_cache[$identifier] = self::identifier_to_branded_color($identifier, self::$color_cache[$identifier]);
				self::$color_cache[$check_branding][$identifier] = self::identifier_to_branded_color($identifier, self::$color_cache[$check_branding][$identifier]);
			}
		}

		return self::$color_cache[$identifier];
		return self::$color_cache[$check_branding][$identifier];
	}
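The change keys the color cache on $check_branding as well as $identifier, so a branded lookup can no longer overwrite or be served the unbranded color for the same identifier. A loose sketch of the fixed behavior; the identifier and brand color below are made up, and this simplifies the paint-pool logic:

<?php
// Two-dimensional cache sketch: PHP casts the bool key to 0/1,
// giving branded and unbranded requests separate slots.
$color_cache = array();

function get_paint_color_sketch(&$cache, $identifier, $check_branding = false)
{
	if(!isset($cache[$check_branding][$identifier]))
	{
		$color = sprintf('#%06X', mt_rand(0, 0xFFFFFF)); // stand-in for the paint pool
		$cache[$check_branding][$identifier] = $check_branding ? '#ED1C24' /* hypothetical brand color */ : $color;
	}
	return $cache[$check_branding][$identifier];
}

$unbranded = get_paint_color_sketch($color_cache, 'AMD Ryzen', false);
$branded = get_paint_color_sketch($color_cache, 'AMD Ryzen', true);
// The two lookups now come from separate cache slots instead of clobbering each other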
	protected function maximum_graph_value()
	{
		$real_maximum = 0;

		$data_max = $this->test_result->test_result_buffer->get_max_value();
		$max_precision = pts_math::get_precision($data_max);
		if(!is_numeric($data_max))
		{
			if(is_array($data_max))
@@ -411,13 +412,14 @@

			if($maximum > 1)
			{
				round($maximum);
				pts_math::set_precision($maximum, $max_precision);
			}
		}
		else
		{
			$maximum = (floor(round($real_maximum * $this->i['graph_max_value_multiplier']) / $this->i['mark_count']) + 1) * $this->i['mark_count'];
			$maximum = round(ceil($maximum / $this->i['mark_count']), (0 - strlen($maximum) + 2)) * $this->i['mark_count'];
			$maximum = pts_math::set_precision($maximum, $max_precision);
		}
	}
	else
@@ -28,7 +28,7 @@ class pts_graph_run_vs_run extends pts_graph_core

	public static function cmp_result_object_sort($a, $b)
	{
		return strcmp($b->get_spread(), $a->get_spread());
		return strcmp($b->get_spread(false), $a->get_spread(false));

		return strcmp($a, $b);
	}
@@ -98,7 +98,7 @@ class pts_graph_vertical_bars extends pts_graph_core
		$bar_width = floor(($this->i['identifier_width'] - $separator_width - ($bar_count * $separator_width)) / $bar_count);
		$bar_font_size_ratio = 1;

		while(floor($bar_width * 0.8) < self::text_string_width($this->i['graph_max_value'] + 0.01, floor(self::$c['size']['bars'] * $bar_font_size_ratio)) && $bar_font_size_ratio >= 0.6)
		while(floor($bar_width * 0.82) < self::text_string_width($this->i['graph_max_value'], floor(self::$c['size']['bars'] * $bar_font_size_ratio)) && $bar_font_size_ratio >= 0.3)
		{
			$bar_font_size_ratio -= 0.05;
		}
@@ -185,7 +185,7 @@ class pts_result_file_analyzer
			$test_result->test_result_buffer->add_test_result($identifier, pts_math::set_precision($values, 3));
		}

		if(!$result_file->is_multi_way_comparison() || $do_sort)
		if((!$result_file->is_multi_way_comparison() && !$test_result->test_result_buffer->result_identifier_differences_only_numeric()) || $do_sort)
		{
			$test_result->sort_results_by_performance();
			$test_result->test_result_buffer->buffer_values_reverse();
@@ -934,10 +934,23 @@ class pts_result_file_analyzer
			unset($json['disk-scheduler']);
			unset($json['disk-mount-options']);
		}
		if(isset($json['cpu-scaling-governor']))
		if(isset($json['cpu-scaling-governor']) || isset($json['cpu-microcode']))
		{
			$system_attributes['Processor'][$identifier] = 'Scaling Governor: ' . $json['cpu-scaling-governor'];
			unset($json['cpu-scaling-governor']);
			$cpu_data = array();

			if(!empty($json['cpu-scaling-governor']))
			{
				$cpu_data[] = 'Scaling Governor: ' . $json['cpu-scaling-governor'];
				unset($json['cpu-scaling-governor']);
			}

			if(!empty($json['cpu-microcode']))
			{
				$cpu_data[] = 'CPU Microcode: ' . $json['cpu-microcode'];
				unset($json['cpu-microcode']);
			}

			$system_attributes['Processor'][$identifier] = implode(' - ', $cpu_data);
		}
		if(isset($json['cpu-smt']))
		{
@@ -436,7 +436,10 @@ class pts_result_file_output
			$repeat_length = $longest_result - strlen($val);
			$result_line .= ($repeat_length >= 0 ? str_repeat(' ', $repeat_length) : null) . '|';
			$current_line_length = strlen($result_line);
			$result_line .= str_repeat('=', max(0, round(($val / $max_value) * ($terminal_width - $current_line_length))));
			if($max_value > 0)
			{
				$result_line .= str_repeat('=', max(0, round(($val / $max_value) * ($terminal_width - $current_line_length))));
			}
		}
		else if($result_object->test_profile->get_display_format() == 'PASS_FAIL')
		{
@@ -27,6 +27,7 @@ class pts_test_result
	private $used_arguments;
	private $used_arguments_description;
	private $result_precision = 2;
	private $overrode_default_precision = false;

	public $test_profile;
	public $test_result_buffer;
@@ -87,12 +88,28 @@
			$this->used_arguments_description .= ($this->used_arguments_description != null && $arguments_description[0] != ' ' ? ' ' : null) . $arguments_description;
		}
	}
	public function set_result_precision($precision = 2)
	public function set_result_precision($precision)
	{
		if(!is_numeric($precision) || $precision < 0)
		{
			return false;
		}

		$this->result_precision = $precision;
		$this->overrode_default_precision = true;
	}
	public function get_result_precision()
	{
		if(!$this->overrode_default_precision && isset($this->active->results) && !empty($this->active->results))
		{
			// default precision
			$p = pts_math::get_precision($this->active->results);
			if($p > 0 && $p < 10)
			{
				return $p;
			}
		}

		return $this->result_precision;
	}
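get_result_precision() now defers to the precision of the raw results unless a test profile explicitly overrode it, matching the ChangeLog item about preserving the precision of the original workload under test. A small sketch of the decimal-place idea; this is an illustrative reimplementation, not pts_math::get_precision() itself:

<?php
// Count decimal places in a result string, e.g. "33.417" => 3
function precision_of($value)
{
	$dot = strpos($value, '.');
	return $dot === false ? 0 : strlen($value) - $dot - 1;
}

echo precision_of('33.417'); // 3: the result would keep three decimals
echo PHP_EOL;
echo precision_of('1500');   // 0: outside the (0, 10) window, so the class default of 2 applies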
	public function set_used_arguments($used_arguments)
@@ -247,9 +264,9 @@

		return $winner;
	}
	public function get_spread()
	public function get_spread($noisy_check = true)
	{
		if($this->has_noisy_result())
		if($noisy_check && $this->has_noisy_result())
		{
			return -1;
		}
@@ -306,13 +306,33 @@ class pts_test_result_buffer

		return $identifier;
	}
	public function result_identifier_differences_only_numeric()
	{
		if(!isset($this->buffer_items[0]))
		{
			return false;
		}

		$first_result = pts_strings::remove_from_string($this->buffer_items[0]->get_result_identifier(), pts_strings::CHAR_NUMERIC | pts_strings::CHAR_DECIMAL | pts_strings::CHAR_DASH);
		for($i = 1; $i < count($this->buffer_items); $i++)
		{
			$result = pts_strings::remove_from_string($this->buffer_items[$i]->get_result_identifier(), pts_strings::CHAR_NUMERIC | pts_strings::CHAR_DECIMAL | pts_strings::CHAR_DASH);
			if($result != $first_result)
			{
				return false;
			}
		}
		return true;
	}
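This helper backs the ChangeLog item "Don't sort geometric mean results when the only difference between result identifiers is numeric in nature." A quick illustration of the stripping idea, approximating pts_strings::remove_from_string() with those CHAR_* flags via a regex (the identifiers are made up):

<?php
// Strip numeric, decimal and dash characters, then compare what remains
function strip_numericish($identifier)
{
	return preg_replace('/[0-9.\-]/', '', $identifier);
}

$identifiers = array('Test Run 1', 'Test Run 2', 'Test Run 3.5');
$first = strip_numericish($identifiers[0]);
$only_numeric_differences = true;

for($i = 1; $i < count($identifiers); $i++)
{
	if(strip_numericish($identifiers[$i]) != $first)
	{
		$only_numeric_differences = false;
		break;
	}
}
var_dump($only_numeric_differences); // bool(true): such results keep their original order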
	public function get_max_value($return_identifier = false)
	{
		$value = 0;
		$max_id = null;
		$precision = 2;

		foreach($this->buffer_items as &$buffer_item)
		{
			$precision = max($precision, pts_math::get_precision($buffer_item->get_result_value()));
			if($buffer_item->get_result_value() > $value)
			{
				$value = $buffer_item->get_result_value();
@@ -320,7 +340,7 @@
			}
		}

		return $return_identifier ? $max_id : $value;
		return $return_identifier ? $max_id : pts_math::set_precision($value, $precision);
	}
	public function get_min_value($return_identifier = false)
	{
@@ -403,6 +423,17 @@
		{
			$precision = 0;
		}
		else if(false) // TODO XXX investigate whether this code path helps for vertical bar graphs with varying precision lengths
		{
			$precision = 0;
			foreach($this->buffer_items as &$buffer_item)
			{
				if(is_numeric(($val = $buffer_item->get_result_value())))
				{
					$precision = max($precision, pts_math::get_precision($val));
				}
			}
		}
	}
	if(is_numeric($precision))
	{
@@ -136,7 +136,7 @@ class pts_test_result_parser
		$end_time = microtime(true);

		// Delta time
		$result_value = $end_time - $sensor_r[3];
		$result_value = round($end_time - $sensor_r[3], 3);

		$minimal_test_time = pts_config::read_user_config('PhoronixTestSuite/Options/TestResultValidation/MinimalTestTime', 2);
		if($result_value < $minimal_test_time)
@@ -278,10 +278,10 @@ function pts_version_codenames()
	);
}

pts_define('PTS_VERSION', '9.0.1');
pts_define('PTS_CORE_VERSION', 9010);
pts_define('PTS_RELEASE_DATE', '20191001');
pts_define('PTS_CODENAME', 'Asker');
pts_define('PTS_VERSION', '9.2.0m2');
pts_define('PTS_CORE_VERSION', 9120);
pts_define('PTS_RELEASE_DATE', '20191113');
pts_define('PTS_CODENAME', 'Hurdal');

pts_define('PTS_IS_CLIENT', (defined('PTS_MODE') && strstr(PTS_MODE, 'CLIENT') !== false));
pts_define('PTS_IS_WEB_CLIENT', (defined('PTS_MODE') && PTS_MODE == 'WEB_CLIENT'));