diff --git a/scripts/process_angle_perf_results.py b/scripts/process_angle_perf_results.py
index e92f84cb1..bf5dccbd0 100755
--- a/scripts/process_angle_perf_results.py
+++ b/scripts/process_angle_perf_results.py
@@ -293,6 +293,17 @@ def process_perf_results(output_json,
     benchmark_enabled_map = _handle_perf_json_test_results(benchmark_directory_map,
                                                            test_results_list)
 
+    for benchmark_name, directories in benchmark_directory_map.items():
+        if not benchmark_enabled_map.get(benchmark_name, False):
+            continue
+
+        for directory in directories:
+            with open(os.path.join(directory, 'angle_metrics.json')) as f:
+                metrics = json.load(f)
+            metric_names = list(set(d['metric'] for group in metrics for d in group))
+            logging.info('angle_metrics: len=%d metrics=%s (directory=%s)' %
+                         (len(metrics), '|'.join(metric_names), directory))
+
     if not smoke_test_mode and handle_perf:
         build_properties_map = json.loads(build_properties)
         if not configuration_name:
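
For reference, the loop above assumes angle_metrics.json holds one list per sample, each containing per-metric records keyed at least by 'metric' (the full field set comes from the MetricWriter added later in this patch). A minimal sketch of that shape and of the name aggregation, with made-up values:

    # Hypothetical contents of angle_metrics.json: a list of per-sample groups,
    # each a list of metric records; values are serialized as strings.
    metrics = [
        [{'metric': '.wall_time', 'value': '123.400000', 'units': 'ns'},
         {'metric': '.cpu_time', 'value': '150.000000', 'units': 'ns'}],
        [{'metric': '.wall_time', 'value': '121.900000', 'units': 'ns'}],
    ]
    metric_names = list(set(d['metric'] for group in metrics for d in group))
    # e.g. ['.wall_time', '.cpu_time'] in arbitrary order
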
diff --git a/src/tests/perf_tests/ANGLEPerfTest.cpp b/src/tests/perf_tests/ANGLEPerfTest.cpp
index 7e10346ba..554face58 100644
--- a/src/tests/perf_tests/ANGLEPerfTest.cpp
+++ b/src/tests/perf_tests/ANGLEPerfTest.cpp
@@ -441,6 +441,33 @@ void ANGLEPerfTest::SetUp() {}
 
 void ANGLEPerfTest::TearDown() {}
 
+void ANGLEPerfTest::recordIntegerMetric(const char *metric, size_t value, const std::string &units)
+{
+    // Prints "RESULT ..." to stdout
+    mReporter->AddResult(metric, value);
+
+    // Saves results to file if enabled
+    TestSuite::GetMetricWriter().writeInfo(mName, mBackend, mStory, metric, units);
+    TestSuite::GetMetricWriter().writeIntegerValue(value);
+}
+
+void ANGLEPerfTest::recordDoubleMetric(const char *metric, double value, const std::string &units)
+{
+    // Prints "RESULT ..." to stdout
+    mReporter->AddResult(metric, value);
+
+    // Saves results to file if enabled
+    TestSuite::GetMetricWriter().writeInfo(mName, mBackend, mStory, metric, units);
+    TestSuite::GetMetricWriter().writeDoubleValue(value);
+}
+
+void ANGLEPerfTest::addHistogramSample(const char *metric, double value, const std::string &units)
+{
+    std::string measurement = mName + mBackend + metric;
+    // Output histogram JSON set format if enabled.
+    TestSuite::GetInstance()->addHistogramSample(measurement, mStory, value, units);
+}
+
 void ANGLEPerfTest::processResults()
 {
     processClockResult(".cpu_time", mTrialTimer.getElapsedCpuTime());
@@ -490,32 +517,22 @@ void ANGLEPerfTest::processResults()
 
             // Median
            {
-                size_t midpoint = samples.size() >> 1;
+                size_t midpoint = samples.size() / 2;
                 std::nth_element(samples.begin(), samples.begin() + midpoint, samples.end());
 
-                std::stringstream medianStr;
-                medianStr << "." << counterName << "_median";
-                std::string medianName = medianStr.str();
-
-                mReporter->AddResult(medianName, static_cast<size_t>(samples[midpoint]));
-
-                std::string measurement = mName + mBackend + "." + counterName + "_median";
-                TestSuite::GetInstance()->addHistogramSample(
-                    measurement, mStory, static_cast<double>(samples[midpoint]), "count");
+                std::string medianName = "." + counterName + "_median";
+                recordIntegerMetric(medianName.c_str(), static_cast<size_t>(samples[midpoint]),
+                                    "count");
+                addHistogramSample(medianName.c_str(), static_cast<double>(samples[midpoint]), "count");
             }
 
             // Maximum
            {
                 const auto &maxIt = std::max_element(samples.begin(), samples.end());
 
-                std::stringstream maxStr;
-                maxStr << "." << counterName << "_max";
-                std::string maxName = maxStr.str();
-                mReporter->AddResult(maxName, static_cast<size_t>(*maxIt));
-
-                std::string measurement = mName + mBackend + "." + counterName + "_max";
-                TestSuite::GetInstance()->addHistogramSample(measurement, mStory,
-                                                             static_cast<double>(*maxIt), "count");
+                std::string maxName = "." + counterName + "_max";
+                recordIntegerMetric(maxName.c_str(), static_cast<size_t>(*maxIt), "count");
+                addHistogramSample(maxName.c_str(), static_cast<double>(*maxIt), "count");
             }
 
             // Sum
@@ -523,14 +540,9 @@ void ANGLEPerfTest::processResults()
                 GLuint64 sum =
                     std::accumulate(samples.begin(), samples.end(), static_cast<GLuint64>(0));
 
-                std::stringstream sumStr;
-                sumStr << "." << counterName << "_sum";
-                std::string sumName = sumStr.str();
-                mReporter->AddResult(sumName, static_cast<size_t>(sum));
-
-                std::string measurement = mName + mBackend + "." + counterName + "_sum";
-                TestSuite::GetInstance()->addHistogramSample(measurement, mStory,
-                                                             static_cast<double>(sum), "count");
+                std::string sumName = "." + counterName + "_sum";
+                recordIntegerMetric(sumName.c_str(), static_cast<size_t>(sum), "count");
+                addHistogramSample(sumName.c_str(), static_cast<double>(sum), "count");
             }
         }
     }
@@ -564,12 +576,9 @@ void ANGLEPerfTest::processClockResult(const char *metric, double resultSeconds)
     {
         result = secondsPerIteration * kNanoSecondsPerSecond;
     }
-    mReporter->AddResult(metric, result);
-
-    // Output histogram JSON set format if enabled.
-    TestSuite::GetInstance()->addHistogramSample(mName + mBackend + metric, mStory,
-                                                 secondsPerIteration * kMilliSecondsPerSecond,
-                                                 "msBestFitFormat_smallerIsBetter");
+    recordDoubleMetric(metric, result, units);
+    addHistogramSample(metric, secondsPerIteration * kMilliSecondsPerSecond,
+                       "msBestFitFormat_smallerIsBetter");
 }
 
 void ANGLEPerfTest::processMemoryResult(const char *metric, uint64_t resultKB)
@@ -580,11 +589,9 @@ void ANGLEPerfTest::processMemoryResult(const char *metric, uint64_t resultKB)
         mReporter->RegisterImportantMetric(metric, "sizeInBytes");
     }
 
-    mReporter->AddResult(metric, static_cast<size_t>(resultKB * 1000));
-
-    TestSuite::GetInstance()->addHistogramSample(mName + mBackend + metric, mStory,
-                                                 static_cast<double>(resultKB) * 1000.0,
-                                                 "sizeInBytes_smallerIsBetter");
+    recordIntegerMetric(metric, static_cast<size_t>(resultKB * 1000), "sizeInBytes");
+    addHistogramSample(metric, static_cast<double>(resultKB) * 1000.0,
+                       "sizeInBytes_smallerIsBetter");
 }
 
 double ANGLEPerfTest::normalizedTime(size_t value) const
diff --git a/src/tests/perf_tests/ANGLEPerfTest.h b/src/tests/perf_tests/ANGLEPerfTest.h
index 75753e1df..0e66e7647 100644
--- a/src/tests/perf_tests/ANGLEPerfTest.h
+++ b/src/tests/perf_tests/ANGLEPerfTest.h
@@ -111,6 +111,10 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable
     void calibrateStepsToRun();
     int estimateStepsToRun() const;
 
+    void recordIntegerMetric(const char *metric, size_t value, const std::string &units);
+    void recordDoubleMetric(const char *metric, double value, const std::string &units);
+    void addHistogramSample(const char *metric, double value, const std::string &units);
+
     void processResults();
     void processClockResult(const char *metric, double resultSeconds);
     void processMemoryResult(const char *metric, uint64_t resultKB);
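
For each sampled GPU counter, processResults() now emits three derived metrics through the new recordIntegerMetric()/addHistogramSample() helpers. A minimal Python sketch of the naming and aggregation, using a hypothetical counter name and made-up samples (nth_element at size()/2 selects the upper median for even-sized input, which the sort-based line below reproduces):

    counter_name = 'example_counter'  # hypothetical counter name
    samples = [12, 7, 9, 15]
    derived = {
        '.%s_median' % counter_name: sorted(samples)[len(samples) // 2],  # 12
        '.%s_max' % counter_name: max(samples),                           # 15
        '.%s_sum' % counter_name: sum(samples),                           # 43
    }
    # Each entry is reported via recordIntegerMetric() and addHistogramSample(),
    # both with units "count".
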
diff --git a/src/tests/run_perf_tests.py b/src/tests/run_perf_tests.py
index 3c82739cc..90b9abde4 100755
--- a/src/tests/run_perf_tests.py
+++ b/src/tests/run_perf_tests.py
@@ -14,11 +14,13 @@
 import importlib
 import io
 import json
 import logging
+import tempfile
 import time
 import os
 import pathlib
 import re
 import subprocess
+import shutil
 import sys
 
 SCRIPT_DIR = str(pathlib.Path(__file__).resolve().parent)
@@ -56,6 +58,15 @@
 EXIT_FAILURE = 1
 EXIT_SUCCESS = 0
 
 
+@contextlib.contextmanager
+def temporary_dir(prefix=''):
+    path = tempfile.mkdtemp(prefix=prefix)
+    try:
+        yield path
+    finally:
+        shutil.rmtree(path)
+
+
 def _shard_tests(tests, shard_count, shard_index):
     return [tests[index] for index in range(shard_index, len(tests), shard_count)]
@@ -110,7 +121,7 @@ def _coefficient_of_variation(data):
     return stddev / c
 
 
-def _save_extra_output_files(args, results, histograms):
+def _save_extra_output_files(args, results, histograms, metrics):
     isolated_out_dir = os.path.dirname(args.isolated_script_test_output)
     if not os.path.isdir(isolated_out_dir):
         return
@@ -124,6 +135,9 @@ def _save_extra_output_files(args, results, histograms):
     with open(perf_output_path, 'w') as out_file:
         out_file.write(json.dumps(histograms.AsDicts(), indent=2))
 
+    with open(os.path.join(benchmark_path, 'angle_metrics.json'), 'w') as f:
+        f.write(json.dumps(metrics, indent=2))
+
 
 class Results:
 
@@ -191,6 +205,14 @@ def _read_histogram(histogram_file_path):
     return histogram
 
 
+def _read_metrics(metrics_file_path):
+    try:
+        with open(metrics_file_path) as f:
+            return [json.loads(l) for l in f]
+    except FileNotFoundError:
+        return []
+
+
 def _merge_into_one_histogram(test_histogram_set):
     with common.temporary_file() as merge_histogram_path:
         logging.info('Writing merged histograms to %s.' % merge_histogram_path)
@@ -267,8 +289,10 @@ def _run_perf(args, common_args, env, steps_per_trial=None):
     if args.perf_counters:
         run_args += ['--perf-counters', args.perf_counters]
 
-    with common.temporary_file() as histogram_file_path:
+    with temporary_dir() as render_output_dir:
+        histogram_file_path = os.path.join(render_output_dir, 'histogram')
         run_args += ['--isolated-script-test-perf-output=%s' % histogram_file_path]
+        run_args += ['--render-test-output-dir=%s' % render_output_dir]
 
         exit_code, output, json_results = _run_test_suite(args, run_args, env)
         if exit_code != EXIT_SUCCESS:
@@ -276,10 +300,11 @@ def _run_perf(args, common_args, env, steps_per_trial=None):
         if SKIP in json_results['num_failures_by_type']:
             return SKIP, None, None
 
-        sample_wall_times = _get_results_from_output(output, 'wall_time')
-        if sample_wall_times:
+        sample_metrics = _read_metrics(os.path.join(render_output_dir, 'angle_metrics'))
+
+        if sample_metrics:
             sample_histogram = _read_histogram(histogram_file_path)
-            return PASS, sample_wall_times, sample_histogram
+            return PASS, sample_metrics, sample_histogram
 
     return FAIL, None, None
 
@@ -305,6 +330,7 @@ def _run_tests(tests, args, extra_flags, env):
     result_suffix = '_shard%d' % args.shard_index if args.shard_index is not None else ''
     results = Results(result_suffix)
     histograms = histogram_set.HistogramSet()
+    metrics = []
 
     total_errors = 0
     prepared_traces = set()
@@ -358,7 +384,7 @@ def _run_tests(tests, args, extra_flags, env):
         test_histogram_set = histogram_set.HistogramSet()
         for sample in range(args.samples_per_test):
             try:
-                test_status, sample_wall_times, sample_histogram = _run_perf(
+                test_status, sample_metrics, sample_histogram = _run_perf(
                     args, common_args, env, steps_per_trial)
             except RuntimeError as e:
                 logging.error(e)
@@ -370,11 +396,15 @@ def _run_tests(tests, args, extra_flags, env):
                 results.result_skip(test)
                 break
 
-            if not sample_wall_times:
+            if not sample_metrics:
                 logging.error('Test %s failed to produce a sample output' % test)
                 results.result_fail(test)
                 break
 
+            sample_wall_times = [
+                float(m['value']) for m in sample_metrics if m['metric'] == '.wall_time'
+            ]
+
             logging.info('Test %d/%d Sample %d/%d wall_times: %s' %
                          (test_index + 1, len(tests), sample + 1, args.samples_per_test,
                           str(sample_wall_times)))
@@ -387,6 +417,7 @@ def _run_tests(tests, args, extra_flags, env):
 
             wall_times += sample_wall_times
             test_histogram_set.Merge(sample_histogram)
+            metrics.append(sample_metrics)
 
         if not results.has_result(test):
             assert len(wall_times) == (args.samples_per_test * args.trials_per_sample)
@@ -396,7 +427,7 @@ def _run_tests(tests, args, extra_flags, env):
             histograms.Merge(_merge_into_one_histogram(test_histogram_set))
             results.result_pass(test)
 
-    return results, histograms
+    return results, histograms, metrics
 
 
 def _find_test_suite_directory(test_suite):
@@ -608,7 +639,7 @@ def main():
 
     try:
         with _maybe_lock_gpu_clocks():
-            results, histograms = _run_tests(tests, args, extra_flags, env)
+            results, histograms, metrics = _run_tests(tests, args, extra_flags, env)
     except _MaxErrorsException:
         logging.error('Error count exceeded max errors (%d). Aborting.' % args.max_errors)
         return EXIT_FAILURE
@@ -620,7 +651,7 @@ def main():
     results.save_to_output_file(args.test_suite, args.isolated_script_test_output)
 
     # Uses special output files to match the merge script.
-    _save_extra_output_files(args, results, histograms)
+    _save_extra_output_files(args, results, histograms, metrics)
 
     if args.isolated_script_test_perf_output:
         with open(args.isolated_script_test_perf_output, 'w') as out_file:
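
On the reading side, _read_metrics() treats the 'angle_metrics' file produced by the test binary as JSON Lines: one object per metric, a missing file yielding an empty list, and values arriving as strings (hence the float() cast above). A small sketch of that parsing plus the '.wall_time' filter, with made-up lines:

    import json

    # Hypothetical 'angle_metrics' lines in the format _read_metrics() expects.
    lines = [
        '{"metric":".wall_time","units":"ns","value":"123.400000"}',
        '{"metric":".cpu_time","units":"ns","value":"150.000000"}',
    ]
    sample_metrics = [json.loads(l) for l in lines]
    sample_wall_times = [float(m['value']) for m in sample_metrics if m['metric'] == '.wall_time']
    # sample_wall_times == [123.4]
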
diff --git a/src/tests/test_utils/runner/TestSuite.cpp b/src/tests/test_utils/runner/TestSuite.cpp
index 622ed2227..96a708cf3 100644
--- a/src/tests/test_utils/runner/TestSuite.cpp
+++ b/src/tests/test_utils/runner/TestSuite.cpp
@@ -807,6 +807,61 @@ bool UsesExternalBatching()
 }
 
 }  // namespace
 
+void MetricWriter::enable(const std::string &testArtifactDirectory)
+{
+    mPath = testArtifactDirectory + GetPathSeparator() + "angle_metrics";
+}
+
+void MetricWriter::writeInfo(const std::string &name,
+                             const std::string &backend,
+                             const std::string &story,
+                             const std::string &metric,
+                             const std::string &units)
+{
+    if (mPath.empty())
+    {
+        return;
+    }
+
+    if (mFile == nullptr)
+    {
+        mFile = fopen(mPath.c_str(), "w");
+    }
+    ASSERT(mFile != nullptr);
+
+    fprintf(mFile, "{\"name\":\"%s\",", name.c_str());
+    fprintf(mFile, "\"backend\":\"%s\",", backend.c_str());
+    fprintf(mFile, "\"story\":\"%s\",", story.c_str());
+    fprintf(mFile, "\"metric\":\"%s\",", metric.c_str());
+    fprintf(mFile, "\"units\":\"%s\",", units.c_str());
+    // followed by writing value, so no closing bracket yet
+}
+
+void MetricWriter::writeDoubleValue(double value)
+{
+    if (mFile != nullptr)
+    {
+        fprintf(mFile, "\"value\":\"%lf\"}\n", value);
+    }
+}
+
+void MetricWriter::writeIntegerValue(size_t value)
+{
+    if (mFile != nullptr)
+    {
+        fprintf(mFile, "\"value\":\"%zu\"}\n", value);
+    }
+}
+
+void MetricWriter::close()
+{
+    if (mFile != nullptr)
+    {
+        fclose(mFile);
+        mFile = nullptr;
+    }
+}
+
 // static
 TestSuite *TestSuite::mInstance = nullptr;
@@ -1178,6 +1233,11 @@ TestSuite::TestSuite(int *argc, char **argv, std::function<void()> registerTests
         mResultsFile = resultFileName.str();
     }
 
+    if (!mTestArtifactDirectory.empty())
+    {
+        mMetricWriter.enable(mTestArtifactDirectory);
+    }
+
     if (!mBotMode)
     {
         testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
@@ -1927,6 +1987,8 @@ void TestSuite::writeOutputFiles(bool interrupted)
     {
         WriteHistogramJson(mHistogramWriter, mHistogramJsonFile);
     }
+
+    mMetricWriter.close();
 }
 
 const char *TestResultTypeToString(TestResultType type)
diff --git a/src/tests/test_utils/runner/TestSuite.h b/src/tests/test_utils/runner/TestSuite.h
index 74dd0fbe3..20f1f6ce1 100644
--- a/src/tests/test_utils/runner/TestSuite.h
+++ b/src/tests/test_utils/runner/TestSuite.h
@@ -123,6 +123,29 @@ struct ProcessInfo : angle::NonCopyable
 
 using TestQueue = std::queue<std::vector<TestIdentifier>>;
 
+class MetricWriter
+{
+  public:
+    MetricWriter() {}
+
+    void enable(const std::string &testArtifactDirectory);
+
+    void writeInfo(const std::string &name,
+                   const std::string &backend,
+                   const std::string &story,
+                   const std::string &metric,
+                   const std::string &units);
+
+    void writeDoubleValue(double value);
+    void writeIntegerValue(size_t value);
+
+    void close();
+
+  private:
+    std::string mPath;
+    FILE *mFile = nullptr;
+};
+
 class TestSuite
 {
   public:
@@ -138,6 +161,7 @@ class TestSuite
                             const std::string &units);
 
     static TestSuite *GetInstance() { return mInstance; }
+    static MetricWriter &GetMetricWriter() { return GetInstance()->mMetricWriter; }
 
     // Returns the path to the artifact in the output directory.
     bool hasTestArtifactsDirectory() const;
@@ -207,6 +231,7 @@ class TestSuite
     std::vector<ProcessInfo> mCurrentProcesses;
     std::thread mWatchdogThread;
     HistogramWriter mHistogramWriter;
+    MetricWriter mMetricWriter;
     std::string mTestArtifactDirectory;
     GPUTestExpectationsParser mTestExpectationsParser;
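
On the writing side, each MetricWriter record is a single line: writeInfo() emits the identifying fields and leaves the JSON object open, and the following writeDoubleValue()/writeIntegerValue() call closes it with the value serialized as a quoted string, which is why the Python readers cast with float(). A sketch of one such line and a json.loads round-trip; the name, backend, story, and units values here are only illustrative:

    import json

    # Hypothetical record matching the fprintf sequence in MetricWriter.
    line = ('{"name":"TracePerfTest","backend":".vulkan","story":"some_story",'
            '"metric":".wall_time","units":"ns","value":"123.400000"}')
    record = json.loads(line)
    assert record['metric'] == '.wall_time'
    assert float(record['value']) == 123.4
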