diff options
author | Snappy Team <no-reply@google.com> | 2020-11-13 13:12:41 +0000 |
---|---|---|
committer | Victor Costan <costan@google.com> | 2020-11-18 23:21:12 +0000 |
commit | 11e5165b98c32038fad44ee282619484ed3b80da (patch) | |
tree | 77945b721f73205544cdbf2ebc86450380f116a8 | |
parent | 6835abd953cd66c6cf7716dd8347e9c5f245662a (diff) | |
download | snappy-git-11e5165b98c32038fad44ee282619484ed3b80da.tar.gz |
Add a benchmark that decreases branch prediction memorization by increasing the number of independent branches executed per benchmark iteration.
PiperOrigin-RevId: 342242843
-rw-r--r-- | snappy_unittest.cc | 27 |
1 file changed, 27 insertions, 0 deletions
diff --git a/snappy_unittest.cc b/snappy_unittest.cc index d14c56b..d433545 100644 --- a/snappy_unittest.cc +++ b/snappy_unittest.cc @@ -1284,6 +1284,33 @@ static void BM_UFlat(int iters, int arg) { } BENCHMARK(BM_UFlat)->DenseRange(0, ARRAYSIZE(files) - 1); +static void BM_UFlatMedley(testing::benchmark::State& state) { + constexpr int kFiles = ARRAYSIZE(files); + std::string zcontents[kFiles]; + size_t sizes[kFiles]; + size_t max_size = 0; + for (int i = 0; i < kFiles; i++) { + std::string contents = + ReadTestDataFile(files[i].filename, files[i].size_limit); + max_size = std::max(max_size, contents.size()); + sizes[i] = contents.size(); + snappy::Compress(contents.data(), contents.size(), &zcontents[i]); + } + + std::vector<char> dst(max_size); + + size_t processed = 0; + for (auto s : state) { + for (int i = 0; i < kFiles; i++) { + CHECK(snappy::RawUncompress(zcontents[i].data(), zcontents[i].size(), + dst.data())); + processed += sizes[i]; + } + } + SetBenchmarkBytesProcessed(processed); +} +BENCHMARK(BM_UFlatMedley); + static void BM_UValidate(int iters, int arg) { StopBenchmarkTiming(); |