author     Thiago Marcos P. Santos <tmpsantos@gmail.com>   2019-12-15 01:13:17 +0200
committer  Thiago Marcos P. Santos <tmpsantos@gmail.com>   2019-12-16 22:52:01 +0200
commit     1c138124a62bf971ace637ac76c752a488e99989 (patch)
tree       e14bb6100abc9dcee123392e8f1a8843c72251ae
parent     65140e3e6012c8aac077f30cd722b12ab6c836d7 (diff)
download   qtlocation-mapboxgl-1c138124a62bf971ace637ac76c752a488e99989.tar.gz
[render-test] Fix metrics passing when failing
Metrics will now fail, but the tool will return 0 when *ONLY* metrics fail, so the bot can continue. We also run ignored tests and check whether they are ignored but passing.
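For illustration, the exit-code policy described above can be condensed into a small standalone helper; the function name and parameters below are hypothetical and not part of this patch:

    // Sketch of the intended CI behavior: a render or image-diff failure fails the
    // job, while a metrics-only failure keeps the exit code at 0 so the rebaseline
    // bot later in the pipeline can still run and collect new baselines.
    int exitCodeFor(bool renderOK, bool imageOK, bool metricsOK) {
        if (!renderOK || !imageOK) return 1;  // hard failure: block the pipeline
        (void)metricsOK;                      // a metrics-only failure is reported but not fatal
        return 0;
    }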
-rw-r--r--   render-test/render_test.cpp   19
-rw-r--r--   render-test/runner.cpp         21
-rw-r--r--   render-test/runner.hpp          8
3 files changed, 32 insertions, 16 deletions
diff --git a/render-test/render_test.cpp b/render-test/render_test.cpp
index d8d980acf8..5b2a91cac4 100644
--- a/render-test/render_test.cpp
+++ b/render-test/render_test.cpp
@@ -119,6 +119,7 @@ ArgumentsTuple parseArguments(int argc, char** argv) {
namespace mbgl {
int runRenderTests(int argc, char** argv, std::function<void()> testStatus) {
+ int returnCode = 0;
bool recycleMap;
bool shuffle;
uint32_t seed;
@@ -177,9 +178,11 @@ int runRenderTests(int argc, char** argv, std::function<void()> testStatus) {
}
}
- bool errored = !metadata.errorMessage.empty() || !runner.run(metadata);
- bool passed =
- !errored && (!metadata.outputsImage || !metadata.diff.empty()) && metadata.difference <= metadata.allowed;
+ auto results = runner.run(metadata);
+
+ bool errored = !metadata.errorMessage.empty() || !(results.probeFailed || results.renderFailed);
+ bool imageOK = (!metadata.outputsImage || !metadata.diff.empty()) && metadata.difference <= metadata.allowed;
+ bool passed = !errored && imageOK;
if (shouldIgnore) {
if (passed) {
@@ -194,6 +197,14 @@ int runRenderTests(int argc, char** argv, std::function<void()> testStatus) {
printf(ANSI_COLOR_LIGHT_GRAY "* ignore %s (%s)" ANSI_COLOR_RESET "\n", id.c_str(), ignoreReason.c_str());
}
} else {
+ // Only fail the bots on render errors, this is a CI limitation that we need
+ // to succeed on metrics failed so the rebaseline bot can run next in the
+ // pipeline and collect the new baselines. The rebaseline bot will ultimately
+ // report the error and block the patch from being merged.
+ if (results.renderFailed == false || !imageOK) {
+ returnCode = 1;
+ }
+
if (passed) {
status = "passed";
color = "green";
@@ -246,7 +257,7 @@ int runRenderTests(int argc, char** argv, std::function<void()> testStatus) {
printf("Results at: %s\n", mbgl::filesystem::canonical(resultPath).c_str());
- return stats.failedTests + stats.erroredTests == 0 ? 0 : 1;
+ return returnCode;
}
} // namespace mbgl
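A side note on the render_test.cpp changes above (this restatement is not part of the patch): the final return no longer derives from stats.failedTests and stats.erroredTests; instead returnCode is set to 1 only in the non-ignored branch, and only when the render or image-diff check fails, which is what lets a metrics-only failure exit with 0. The pass/error computation itself can be restated as a small standalone sketch, where runOK stands in for (results.probeFailed || results.renderFailed) and all names are assumptions:

    #include <string>

    // Restatement of the errored/passed computation in render_test.cpp (hypothetical names).
    bool isPassed(const std::string& errorMessage, bool runOK, bool imageOK) {
        bool errored = !errorMessage.empty() || !runOK;
        return !errored && imageOK;
    }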
diff --git a/render-test/runner.cpp b/render-test/runner.cpp
index 2f97f63e49..9b8f8d19f1 100644
--- a/render-test/runner.cpp
+++ b/render-test/runner.cpp
@@ -243,7 +243,7 @@ bool TestRunner::checkRenderTestResults(mbgl::PremultipliedImage&& actualImage,
}
bool TestRunner::checkProbingResults(TestMetadata& resultMetadata) {
- if (resultMetadata.metrics.isEmpty() || resultMetadata.ignoredTest) return true;
+ if (resultMetadata.metrics.isEmpty()) return true;
const auto writeMetrics = [&resultMetadata](const mbgl::filesystem::path& path,
const std::string& message = std::string()) {
mbgl::filesystem::create_directories(path);
@@ -282,7 +282,7 @@ bool TestRunner::checkProbingResults(TestMetadata& resultMetadata) {
if (resultMetadata.expectedMetrics.isEmpty()) {
resultMetadata.errorMessage =
"Failed to find metric expectations for: " + resultMetadata.paths.stylePath.string();
- if (updateResults == UpdateResults::REBASELINE) {
+ if (updateResults == UpdateResults::REBASELINE && !resultMetadata.ignoredTest) {
writeMetrics(expectedMetrics.back(), ". Created baseline for missing metrics.");
}
return false;
@@ -509,7 +509,7 @@ bool TestRunner::checkProbingResults(TestMetadata& resultMetadata) {
bool checkResult = checkFileSize(resultMetadata) && checkMemory(resultMetadata) && checkNetwork(resultMetadata) &&
checkFps(resultMetadata) && checkGfx(resultMetadata);
- if (!checkResult && updateResults == UpdateResults::REBASELINE) {
+ if (!checkResult && updateResults == UpdateResults::REBASELINE && !resultMetadata.ignoredTest) {
writeMetrics(expectedMetrics.back(), " Rebaselined expected metric for failed test.");
}
@@ -649,7 +649,7 @@ TestRunner::Impl::Impl(const TestMetadata& metadata)
TestRunner::Impl::~Impl() {}
-bool TestRunner::run(TestMetadata& metadata) {
+TestRunner::TestResults TestRunner::run(TestMetadata& metadata) {
AllocationIndex::setActive(false);
AllocationIndex::reset();
ProxyFileSource::setTrackingActive(false);
@@ -678,7 +678,7 @@ bool TestRunner::run(TestMetadata& metadata) {
if (!metadata.ignoredTest) {
for (const auto& operation : getBeforeOperations(manifest)) {
- if (!operation(ctx)) return false;
+ if (!operation(ctx)) return {false, false};
}
}
@@ -696,26 +696,25 @@ bool TestRunner::run(TestMetadata& metadata) {
resetContext(metadata, ctx);
for (const auto& operation : parseTestOperations(metadata, manifest)) {
- if (!operation(ctx)) return false;
+ if (!operation(ctx)) return {false, false};
}
HeadlessFrontend::RenderResult result;
try {
if (metadata.outputsImage) result = frontend.render(ctx.getMap());
} catch (const std::exception&) {
- return false;
+ return {false, false};
}
if (!metadata.ignoredTest) {
ctx.activeGfxProbe = GfxProbe(result.stats, ctx.activeGfxProbe);
for (const auto& operation : getAfterOperations(manifest)) {
- if (!operation(ctx)) return false;
+ if (!operation(ctx)) return {false, false};
}
}
if (metadata.renderTest) {
- (void)checkProbingResults(metadata);
- return checkRenderTestResults(std::move(result.image), metadata);
+ return {checkProbingResults(metadata), checkRenderTestResults(std::move(result.image), metadata)};
} else {
std::vector<mbgl::Feature> features;
assert(metadata.document["metadata"]["test"]["queryGeometry"].IsArray());
@@ -725,7 +724,7 @@ bool TestRunner::run(TestMetadata& metadata) {
} else {
features = frontend.getRenderer()->queryRenderedFeatures(metadata.queryGeometryBox, metadata.queryOptions);
}
- return checkQueryTestResults(std::move(result.image), std::move(features), metadata);
+ return {true, checkQueryTestResults(std::move(result.image), std::move(features), metadata)};
}
}
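As a usage note on the signature change above (illustrative only, not the patch's code): run() now reports two outcomes instead of one, returning {false, false} on any early failure and otherwise forwarding the boolean results of the probe and render/query checks. A standalone sketch of that shape, where the combine() helper and its parameter names are assumptions:

    // Standalone sketch of the new result shape; mirrors the return paths in runner.cpp above.
    struct TestResults {
        bool probeFailed;   // receives the value of checkProbingResults()
        bool renderFailed;  // receives the value of checkRenderTestResults() or checkQueryTestResults()
    };

    TestResults combine(bool earlyFailure, bool probingResult, bool renderResult) {
        if (earlyFailure) return {false, false};  // a failed operation or a render exception aborts the run
        return {probingResult, renderResult};     // otherwise forward both check outcomes
    }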
diff --git a/render-test/runner.hpp b/render-test/runner.hpp
index 7fbe0f7677..9d106c879e 100644
--- a/render-test/runner.hpp
+++ b/render-test/runner.hpp
@@ -36,8 +36,14 @@ public:
class TestRunner {
public:
enum class UpdateResults { NO, DEFAULT, PLATFORM, METRICS, REBASELINE };
+
+ struct TestResults {
+ bool probeFailed;
+ bool renderFailed;
+ };
+
TestRunner(Manifest, UpdateResults);
- bool run(TestMetadata&);
+ TestResults run(TestMetadata&);
void reset();
// Manifest