summaryrefslogtreecommitdiff
path: root/render-test
diff options
context:
space:
mode:
authorAlexander Shalamov <alexander.shalamov@mapbox.com>2019-12-05 18:17:03 +0200
committerAlexander Shalamov <alexander.shalamov@mapbox.com>2019-12-09 17:53:34 +0200
commita9ba572118fdf862a5bbf3f536d66de3a23c4dd3 (patch)
tree0c15367788d948c7c26e6de54fe9b57bcd47f169 /render-test
parent3d0001642ba2d6290252f05592d54fd928f56b63 (diff)
downloadqtlocation-mapboxgl-a9ba572118fdf862a5bbf3f536d66de3a23c4dd3.tar.gz
[test-runner] Add rebaseline flag
The rebaseline flag allows updating or creating a new metric in cases when a metric check fails or an expected metric is missing.
Diffstat (limited to 'render-test')
-rw-r--r--render-test/render_test.cpp3
-rw-r--r--render-test/runner.cpp26
-rw-r--r--render-test/runner.hpp2
3 files changed, 24 insertions, 7 deletions
diff --git a/render-test/render_test.cpp b/render-test/render_test.cpp
index 163c148291..1982964311 100644
--- a/render-test/render_test.cpp
+++ b/render-test/render_test.cpp
@@ -47,7 +47,8 @@ ArgumentsTuple parseArguments(int argc, char** argv) {
const static std::unordered_map<std::string, TestRunner::UpdateResults> updateResultsFlags = {
{"default", TestRunner::UpdateResults::DEFAULT},
{"platform", TestRunner::UpdateResults::PLATFORM},
- {"metrics", TestRunner::UpdateResults::METRICS}};
+ {"metrics", TestRunner::UpdateResults::METRICS},
+ {"rebaseline", TestRunner::UpdateResults::REBASELINE}};
args::ArgumentParser argumentParser("Mapbox GL Test Runner");
diff --git a/render-test/runner.cpp b/render-test/runner.cpp
index 953576cf78..8735660fc2 100644
--- a/render-test/runner.cpp
+++ b/render-test/runner.cpp
@@ -316,11 +316,17 @@ bool TestRunner::checkRenderTestResults(mbgl::PremultipliedImage&& actualImage,
bool TestRunner::checkProbingResults(TestMetadata& metadata) {
if (metadata.metrics.isEmpty()) return true;
+ const auto writeMetrics = [&metadata](const mbgl::filesystem::path& path,
+ const std::string& message = std::string()) {
+ mbgl::filesystem::create_directories(path);
+ mbgl::util::write_file(path / "metrics.json", serializeMetrics(metadata.metrics));
+ metadata.errorMessage += message;
+ };
+
const std::vector<mbgl::filesystem::path>& expectedMetrics = metadata.paths.expectedMetrics;
if (updateResults == UpdateResults::METRICS) {
- mbgl::filesystem::create_directories(expectedMetrics.back());
- mbgl::util::write_file(expectedMetrics.back().string() + "/metrics.json", serializeMetrics(metadata.metrics));
- return true;
+ writeMetrics(expectedMetrics.back(), " Updated expected metrics.");
+ return false;
}
// Check the possible paths in reverse order, so that the default path with the test style will only be checked in
@@ -347,8 +353,12 @@ bool TestRunner::checkProbingResults(TestMetadata& metadata) {
if (metadata.expectedMetrics.isEmpty()) {
metadata.errorMessage = "Failed to find metric expectations for: " + metadata.paths.stylePath.string();
+ if (updateResults == UpdateResults::REBASELINE) {
+ writeMetrics(expectedMetrics.back(), ". Created baseline for missing metrics.");
+ }
return false;
}
+
// Check file size metrics.
auto checkFileSize = [](TestMetadata& metadata) -> bool {
if (metadata.metrics.fileSize.empty()) return true;
@@ -567,8 +577,14 @@ bool TestRunner::checkProbingResults(TestMetadata& metadata) {
return true;
};
- return checkFileSize(metadata) && checkMemory(metadata) && checkNetwork(metadata) && checkFps(metadata) &&
- checkGfx(metadata);
+ bool checkResult = checkFileSize(metadata) && checkMemory(metadata) && checkNetwork(metadata) &&
+ checkFps(metadata) && checkGfx(metadata);
+
+ if (!checkResult && updateResults == UpdateResults::REBASELINE) {
+ writeMetrics(expectedMetrics.back(), " Rebaselined expected metric for failed test.");
+ }
+
+ return checkResult;
}
bool TestRunner::runOperations(const std::string& key, TestMetadata& metadata, RunContext& ctx) {
diff --git a/render-test/runner.hpp b/render-test/runner.hpp
index 5214f203d8..b64c588e70 100644
--- a/render-test/runner.hpp
+++ b/render-test/runner.hpp
@@ -14,7 +14,7 @@ struct TestMetadata;
class TestRunner {
public:
- enum class UpdateResults { NO, DEFAULT, PLATFORM, METRICS };
+ enum class UpdateResults { NO, DEFAULT, PLATFORM, METRICS, REBASELINE };
TestRunner(Manifest, UpdateResults);
bool run(TestMetadata&);
void reset();