Mirror of https://github.com/godotengine/godot-angle-static.git (synced 2026-01-06 02:09:55 +03:00)
Perf Tests: Add --max-steps-performed argument.
This will limit the total number of steps run in the test. When using this argument we skip warmup & calibration. Renames the prior --steps argument to --steps-per-trial.

Bug: b/172480147
Change-Id: Iae7cce26d66ca0bbdb21c1f511a6bfa246bd204e
Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/2519404
Commit-Queue: Jamie Madill <jmadill@chromium.org>
Reviewed-by: Cody Northrop <cnorthrop@google.com>
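For orientation, here is a minimal, self-contained sketch (illustrative only, not code from this commit) of how the two limits are meant to interact: `--steps-per-trial` bounds a single trial, while `--max-steps-performed` caps the total across all trials and can therefore end the run mid-trial. The names below simply mirror the globals and counters introduced in the diff.

```cpp
#include <algorithm>
#include <cstdio>

// Stand-ins for the globals added by this change (values are examples).
int gStepsPerTrial     = 0;  // --steps-per-trial: fixed number of steps per trial
int gMaxStepsPerformed = 0;  // --max-steps-performed: cap on total steps across the whole run

// One trial's run loop: stop on the per-trial limit or the global cap, whichever comes first.
void runTrial(int stepsPerTrial, int &totalStepsPerformed)
{
    int trialStepsPerformed = 0;
    bool running            = true;
    while (running)
    {
        // ... one test step would execute here ...
        ++trialStepsPerformed;
        ++totalStepsPerformed;
        if (trialStepsPerformed >= stepsPerTrial)
        {
            running = false;
        }
        else if (gMaxStepsPerformed > 0 && totalStepsPerformed >= gMaxStepsPerformed)
        {
            running = false;
        }
    }
}

int main()
{
    gStepsPerTrial     = 10;
    gMaxStepsPerformed = 25;  // the whole run stops after 25 steps, even mid-trial
    int totalSteps     = 0;
    for (int trial = 0; trial < 3 && (gMaxStepsPerformed == 0 || totalSteps < gMaxStepsPerformed);
         ++trial)
    {
        runTrial(std::max(gStepsPerTrial, 1), totalSteps);
        std::printf("after trial %d: total steps = %d\n", trial + 1, totalSteps);
    }
    // Prints 10, 20, 25: the third trial is cut short by the global cap.
    return 0;
}
```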
@@ -135,7 +135,7 @@ print("running with %d steps." % steps)
 
 # Loop 'max_experiments' times, running the tests.
 for experiment in range(max_experiments):
-    experiment_scores = get_results(metric, ["--steps", str(steps)])
+    experiment_scores = get_results(metric, ["--steps-per-trial", str(steps)])
 
     for score in experiment_scores:
         sys.stdout.write("%s: %.2f" % (metric, score))

@@ -181,8 +181,9 @@ ANGLEPerfTest::ANGLEPerfTest(const std::string &name,
       mStory(story),
       mGPUTimeNs(0),
       mSkipTest(false),
-      mStepsToRun(gStepsToRunOverride),
-      mNumStepsPerformed(0),
+      mStepsToRun(std::max(gStepsPerTrial, gMaxStepsPerformed)),
+      mTrialNumStepsPerformed(0),
+      mTotalNumStepsPerformed(0),
       mStepsPerRunLoopStep(1),
       mIterationsPerStep(iterationsPerStep),
       mRunning(true)

@@ -198,7 +199,8 @@ ANGLEPerfTest::ANGLEPerfTest(const std::string &name,
     mReporter = std::make_unique<perf_test::PerfResultReporter>(mName + mBackend, mStory);
     mReporter->RegisterImportantMetric(".wall_time", units);
     mReporter->RegisterImportantMetric(".gpu_time", units);
-    mReporter->RegisterFyiMetric(".steps", "count");
+    mReporter->RegisterFyiMetric(".trial_steps", "count");
+    mReporter->RegisterFyiMetric(".total_steps", "count");
 }
 
 ANGLEPerfTest::~ANGLEPerfTest() {}

@@ -225,7 +227,7 @@ void ANGLEPerfTest::run()
         double trialTime = mTimer.getElapsedTime();
         printf("Trial %d time: %.2lf seconds.\n", trial + 1, trialTime);
 
-        double secondsPerStep = trialTime / static_cast<double>(mNumStepsPerformed);
+        double secondsPerStep = trialTime / static_cast<double>(mTrialNumStepsPerformed);
         double secondsPerIteration = secondsPerStep / static_cast<double>(mIterationsPerStep);
         mTestTrialResults.push_back(secondsPerIteration * 1000.0);
     }

@@ -265,9 +267,9 @@ void ANGLEPerfTest::setStepsPerRunLoopStep(int stepsPerRunLoop)
 
 void ANGLEPerfTest::doRunLoop(double maxRunTime, int maxStepsToRun, RunLoopPolicy runPolicy)
 {
-    mNumStepsPerformed = 0;
-    mRunning           = true;
-    mGPUTimeNs         = 0;
+    mTrialNumStepsPerformed = 0;
+    mRunning                = true;
+    mGPUTimeNs              = 0;
     mTimer.start();
     startTest();

@@ -282,12 +284,17 @@ void ANGLEPerfTest::doRunLoop(double maxRunTime, int maxStepsToRun, RunLoopPolic
 
         if (mRunning)
         {
-            mNumStepsPerformed += mStepsPerRunLoopStep;
+            mTrialNumStepsPerformed += mStepsPerRunLoopStep;
+            mTotalNumStepsPerformed += mStepsPerRunLoopStep;
             if (mTimer.getElapsedTime() > maxRunTime)
             {
                 mRunning = false;
             }
-            else if (mNumStepsPerformed >= maxStepsToRun)
+            else if (mTrialNumStepsPerformed >= maxStepsToRun)
             {
                 mRunning = false;
             }
+            else if (gMaxStepsPerformed > 0 && mTotalNumStepsPerformed >= gMaxStepsPerformed)
+            {
+                mRunning = false;
+            }

@@ -320,7 +327,8 @@ double ANGLEPerfTest::printResults()
     double retValue = 0.0;
     for (size_t i = 0; i < clocksToOutput; ++i)
     {
-        double secondsPerStep = elapsedTimeSeconds[i] / static_cast<double>(mNumStepsPerformed);
+        double secondsPerStep =
+            elapsedTimeSeconds[i] / static_cast<double>(mTrialNumStepsPerformed);
         double secondsPerIteration = secondsPerStep / static_cast<double>(mIterationsPerStep);
 
         perf_test::MetricInfo metricInfo;

@@ -355,13 +363,16 @@ double ANGLEPerfTest::printResults()
 
     if (gVerboseLogging)
    {
-        double fps =
-            static_cast<double>(mNumStepsPerformed * mIterationsPerStep) / elapsedTimeSeconds[0];
+        double fps = static_cast<double>(mTrialNumStepsPerformed * mIterationsPerStep) /
+                     elapsedTimeSeconds[0];
         printf("Ran %0.2lf iterations per second\n", fps);
     }
 
+    mReporter->AddResult(".trial_steps", static_cast<size_t>(mTrialNumStepsPerformed));
+    mReporter->AddResult(".total_steps", static_cast<size_t>(mTotalNumStepsPerformed));
+
     // Output histogram JSON set format if enabled.
-    double secondsPerStep = elapsedTimeSeconds[0] / static_cast<double>(mNumStepsPerformed);
+    double secondsPerStep = elapsedTimeSeconds[0] / static_cast<double>(mTrialNumStepsPerformed);
     double secondsPerIteration = secondsPerStep / static_cast<double>(mIterationsPerStep);
     TestSuite::GetInstance()->addHistogramSample(
         mName + mBackend, mStory, secondsPerIteration * kMilliSecondsPerSecond, "msBestFitFormat");

@@ -370,7 +381,7 @@ double ANGLEPerfTest::printResults()
 
 double ANGLEPerfTest::normalizedTime(size_t value) const
 {
-    return static_cast<double>(value) / static_cast<double>(mNumStepsPerformed);
+    return static_cast<double>(value) / static_cast<double>(mTrialNumStepsPerformed);
 }
 
 void ANGLEPerfTest::calibrateStepsToRun()

@@ -381,7 +392,7 @@ void ANGLEPerfTest::calibrateStepsToRun()
 
     // Scale steps down according to the time that exeeded one second.
     double scale = gTestTimeSeconds / elapsedTime;
-    mStepsToRun = static_cast<unsigned int>(static_cast<double>(mNumStepsPerformed) * scale);
+    mStepsToRun = static_cast<unsigned int>(static_cast<double>(mTrialNumStepsPerformed) * scale);
     mStepsToRun = std::max(1, mStepsToRun);
 
     if (gVerboseLogging)

@@ -390,7 +401,7 @@ void ANGLEPerfTest::calibrateStepsToRun()
             "Running %d steps (calibration took %.2lf seconds). Expecting trial time of %.2lf "
             "seconds.\n",
             mStepsToRun, elapsedTime,
-            mStepsToRun * (elapsedTime / static_cast<double>(mNumStepsPerformed)));
+            mStepsToRun * (elapsedTime / static_cast<double>(mTrialNumStepsPerformed)));
     }
 
     // Calibration allows the perf test runner script to save some time.

@@ -89,7 +89,7 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable
     // Call if the test step was aborted and the test should stop running.
     void abortTest() { mRunning = false; }
 
-    int getNumStepsPerformed() const { return mNumStepsPerformed; }
+    int getNumStepsPerformed() const { return mTrialNumStepsPerformed; }
 
     // Defaults to one step per run loop. Can be changed in any test.
     void setStepsPerRunLoopStep(int stepsPerRunLoop);

@@ -110,7 +110,8 @@ class ANGLEPerfTest : public testing::Test, angle::NonCopyable
     bool mSkipTest;
     std::unique_ptr<perf_test::PerfResultReporter> mReporter;
     int mStepsToRun;
-    int mNumStepsPerformed;
+    int mTrialNumStepsPerformed;
+    int mTotalNumStepsPerformed;
     int mStepsPerRunLoopStep;
     int mIterationsPerStep;
     bool mRunning;

@@ -14,7 +14,8 @@
 namespace angle
 {
 bool gCalibration = false;
-int gStepsToRunOverride = -1;
+int gStepsPerTrial = 0;
+int gMaxStepsPerformed = 0;
 bool gEnableTrace = false;
 const char *gTraceFile = "ANGLETrace.json";
 const char *gScreenShotDir = nullptr;

@@ -56,8 +57,8 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv)
     {
         if (strcmp("--one-frame-only", argv[argIndex]) == 0)
         {
-            gStepsToRunOverride = 1;
-            gWarmupLoops        = 0;
+            gStepsPerTrial = 1;
+            gWarmupLoops   = 0;
         }
         else if (strcmp("--enable-trace", argv[argIndex]) == 0)
         {

@@ -73,9 +74,17 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv)
         {
             gCalibration = true;
         }
-        else if (strcmp("--steps", argv[argIndex]) == 0 && argIndex < *argc - 1)
+        else if (strcmp("--steps-per-trial", argv[argIndex]) == 0 && argIndex < *argc - 1)
         {
-            gStepsToRunOverride = ReadIntArgument(argv[argIndex + 1]);
+            gStepsPerTrial = ReadIntArgument(argv[argIndex + 1]);
             // Skip an additional argument.
             argIndex++;
         }
+        else if (strcmp("--max-steps-performed", argv[argIndex]) == 0 && argIndex < *argc - 1)
+        {
+            gMaxStepsPerformed = ReadIntArgument(argv[argIndex + 1]);
+            gWarmupLoops = 0;
+            gTestTrials = 1;
+            // Skip an additional argument.
+            argIndex++;
+        }

@@ -84,7 +93,8 @@ void ANGLEProcessPerfTestArgs(int *argc, char **argv)
             gScreenShotDir = argv[argIndex + 1];
             argIndex++;
         }
-        else if (strcmp("--verbose-logging", argv[argIndex]) == 0)
+        else if (strcmp("--verbose-logging", argv[argIndex]) == 0 ||
+                 strcmp("--verbose", argv[argIndex]) == 0 || strcmp("-v", argv[argIndex]) == 0)
         {
             gVerboseLogging = true;
         }

@@ -15,7 +15,8 @@
 namespace angle
 {
 extern bool gCalibration;
-extern int gStepsToRunOverride;
+extern int gStepsPerTrial;
+extern int gMaxStepsPerformed;
 extern bool gEnableTrace;
 extern const char *gTraceFile;
 extern const char *gScreenShotDir;

@@ -29,7 +30,7 @@ extern bool gStartTraceAfterSetup;
 
 inline bool OneFrame()
 {
-    return gStepsToRunOverride == 1;
+    return gStepsPerTrial == 1 || gMaxStepsPerformed == 1;
 }
 } // namespace angle
 

@@ -26,10 +26,11 @@ Several command-line arguments control how the tests run:
 * `--enable-trace`: Write a JSON event log that can be loaded in Chrome.
 * `--trace-file file`: Name of the JSON event log for `--enable-trace`.
 * `--calibration`: Prints the number of steps a test runs in a fixed time. Used by `perf_test_runner.py`.
-* `--steps x`: Fixed number of steps to run for each test.
+* `--steps-per-trial x`: Fixed number of steps to run for each test trial.
+* `--max-steps-performed x`: Upper maximum on total number of steps for the entire test run.
 * `--screenshot-dir dir`: Directory to store test screenshots. Only implemented in `TracePerfTest`.
 * `--render-test-output-dir=dir`: Equivalent to `--screenshot-dir dir`.
-* `--verbose-logging`: Print extra timing information.
+* `--verbose`: Print extra timing information.
 * `--warmup-loops x`: Number of times to warm up the test before starting timing. Defaults to 3.
 * `--no-warmup`: Skip warming up the tests. Equivalent to `--warmup-steps 0`.
 * `--test-time`: Run each test trial in a fixed time. Defaults to 1 second.

@@ -428,6 +428,14 @@ void TracePerfTest::drawBenchmark()
     // Add a time sample from GL and the host.
     sampleTime();
 
+    uint32_t endFrame = mEndFrame;
+    if (gMaxStepsPerformed > 0)
+    {
+        endFrame =
+            std::min(endFrame, gMaxStepsPerformed - mTotalNumStepsPerformed - 1 + mStartFrame);
+        mStepsPerRunLoopStep = endFrame - mStartFrame + 1;
+    }
+
     for (uint32_t frame = mStartFrame; frame <= mEndFrame; ++frame)
     {
         char frameName[32];
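As a sanity check on the clamp arithmetic in the hunk above (my reading of it, with made-up example numbers, not part of the commit): if 180 of an allowed 250 total steps have already been performed, only 70 more frames may be replayed, so the end frame is clamped accordingly.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>

int main()
{
    // Example values only; the names mirror the members and globals in the diff.
    uint32_t mStartFrame        = 1;    // first frame replayed in a run-loop step
    uint32_t mEndFrame          = 100;  // last frame of the trace
    int gMaxStepsPerformed      = 250;  // --max-steps-performed
    int mTotalNumStepsPerformed = 180;  // steps already performed across the run

    // Same clamp as in the diff: 250 - 180 = 70 frames of budget remain,
    // so the last allowed frame is mStartFrame + 70 - 1 = 70.
    uint32_t endFrame = mEndFrame;
    if (gMaxStepsPerformed > 0)
    {
        endFrame = std::min<uint32_t>(
            endFrame, gMaxStepsPerformed - mTotalNumStepsPerformed - 1 + mStartFrame);
        // mStepsPerRunLoopStep would then be updated to match the shortened replay.
    }
    uint32_t stepsPerRunLoopStep = endFrame - mStartFrame + 1;

    std::printf("endFrame = %u, steps this run-loop step = %u\n", endFrame, stepsPerRunLoopStep);
    // Output: endFrame = 70, steps this run-loop step = 70
    return 0;
}
```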