commit f8c34fa5ee
parent 81b91f4139
Author: Fr4nz D13trich
Date:   2025-11-22 14:04:28 +01:00

    Repo created

22732 changed files with 4815320 additions and 2 deletions

api/test/metrics/BUILD.gn
@@ -0,0 +1,281 @@
# Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../../../webrtc.gni")
if (rtc_enable_protobuf) {
import("//third_party/protobuf/proto_library.gni")
}
group("metrics") {
deps = [
":global_metrics_logger_and_exporter",
":metric",
":metrics_accumulator",
":metrics_exporter",
":metrics_logger",
":stdout_metrics_exporter",
]
}
if (rtc_include_tests) {
group("metrics_unittests") {
testonly = true
deps = [
":global_metrics_logger_and_exporter_test",
":metrics_accumulator_test",
":metrics_logger_test",
":print_result_proxy_metrics_exporter_test",
":stdout_metrics_exporter_test",
]
if (rtc_enable_protobuf) {
deps += [
":chrome_perf_dashboard_metrics_exporter_test",
":metrics_set_proto_file_exporter_test",
]
}
}
}
rtc_library("metric") {
visibility = [ "*" ]
sources = [
"metric.cc",
"metric.h",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
deps = [ "../../../api/units:timestamp" ]
}
rtc_library("metrics_logger") {
visibility = [ "*" ]
sources = [
"metrics_logger.cc",
"metrics_logger.h",
]
deps = [
":metric",
":metrics_accumulator",
"../..:array_view",
"../../../rtc_base/synchronization:mutex",
"../../../system_wrappers",
"../../numerics",
]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("metrics_accumulator") {
visibility = [ "*" ]
sources = [
"metrics_accumulator.cc",
"metrics_accumulator.h",
]
deps = [
":metric",
"../../../rtc_base:macromagic",
"../../../rtc_base/synchronization:mutex",
"../../numerics",
"../../units:timestamp",
]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("metrics_exporter") {
visibility = [ "*" ]
sources = [ "metrics_exporter.h" ]
deps = [
":metric",
"../..:array_view",
]
}
rtc_library("stdout_metrics_exporter") {
visibility = [ "*" ]
sources = [
"stdout_metrics_exporter.cc",
"stdout_metrics_exporter.h",
]
deps = [
":metric",
":metrics_exporter",
"../..:array_view",
"../../../rtc_base:stringutils",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("chrome_perf_dashboard_metrics_exporter") {
visibility = [ "*" ]
testonly = true
sources = [
"chrome_perf_dashboard_metrics_exporter.cc",
"chrome_perf_dashboard_metrics_exporter.h",
]
deps = [
":metric",
":metrics_exporter",
"../../../api:array_view",
"../../../test:fileutils",
"../../../test:perf_test",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
}
if (rtc_enable_protobuf) {
proto_library("metric_proto") {
visibility = [ "*" ]
sources = [ "proto/metric.proto" ]
proto_out_dir = "api/test/metrics/proto"
cc_generator_options = "lite"
}
}
rtc_library("metrics_set_proto_file_exporter") {
visibility = [ "*" ]
testonly = true
sources = [
"metrics_set_proto_file_exporter.cc",
"metrics_set_proto_file_exporter.h",
]
deps = [
":metric",
":metrics_exporter",
"../..:array_view",
"../../../rtc_base:logging",
"../../../test:fileutils",
]
if (rtc_enable_protobuf) {
deps += [ ":metric_proto" ]
}
}
rtc_library("print_result_proxy_metrics_exporter") {
visibility = [ "*" ]
testonly = true
sources = [
"print_result_proxy_metrics_exporter.cc",
"print_result_proxy_metrics_exporter.h",
]
deps = [
":metric",
":metrics_exporter",
"../..:array_view",
"../../../test:perf_test",
]
}
rtc_library("global_metrics_logger_and_exporter") {
visibility = [ "*" ]
sources = [
"global_metrics_logger_and_exporter.cc",
"global_metrics_logger_and_exporter.h",
]
deps = [
":metrics_exporter",
":metrics_logger",
"../../../rtc_base:checks",
"../../../system_wrappers",
]
}
if (rtc_include_tests) {
rtc_library("metrics_logger_test") {
testonly = true
sources = [ "metrics_logger_test.cc" ]
deps = [
":metric",
":metrics_logger",
"../../../system_wrappers",
"../../../test:test_support",
"../../numerics",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("metrics_accumulator_test") {
testonly = true
sources = [ "metrics_accumulator_test.cc" ]
deps = [
":metric",
":metrics_accumulator",
"../../../test:test_support",
"../../units:timestamp",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("stdout_metrics_exporter_test") {
testonly = true
sources = [ "stdout_metrics_exporter_test.cc" ]
deps = [
":metric",
":stdout_metrics_exporter",
"../../../test:test_support",
"../../units:timestamp",
]
}
rtc_library("print_result_proxy_metrics_exporter_test") {
testonly = true
sources = [ "print_result_proxy_metrics_exporter_test.cc" ]
deps = [
":metric",
":print_result_proxy_metrics_exporter",
"../../../test:test_support",
"../../units:timestamp",
]
}
rtc_library("global_metrics_logger_and_exporter_test") {
testonly = true
sources = [ "global_metrics_logger_and_exporter_test.cc" ]
deps = [
":global_metrics_logger_and_exporter",
":metric",
":metrics_exporter",
":metrics_logger",
"../../../system_wrappers",
"../../../test:test_support",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
if (rtc_enable_protobuf) {
rtc_library("metrics_set_proto_file_exporter_test") {
testonly = true
sources = [ "metrics_set_proto_file_exporter_test.cc" ]
deps = [
":metric",
":metric_proto",
":metrics_set_proto_file_exporter",
"../../../rtc_base:protobuf_utils",
"../../../test:fileutils",
"../../../test:test_support",
"../../units:timestamp",
]
}
rtc_library("chrome_perf_dashboard_metrics_exporter_test") {
testonly = true
sources = [ "chrome_perf_dashboard_metrics_exporter_test.cc" ]
deps = [
":chrome_perf_dashboard_metrics_exporter",
":metric",
"../../../api/units:timestamp",
"../../../test:fileutils",
"../../../test:test_support",
"//third_party/catapult/tracing/tracing:histogram",
]
}
}
}

api/test/metrics/DEPS
@@ -0,0 +1,14 @@
specific_include_rules = {
"metrics_logger_and_exporter\.h": [
"+rtc_base/synchronization/mutex.h",
"+system_wrappers/include/clock.h",
],
"metrics_logger\.h": [
"+rtc_base/synchronization/mutex.h",
"+system_wrappers/include/clock.h",
],
"metrics_accumulator\.h": [
"+rtc_base/synchronization/mutex.h",
"+rtc_base/thread_annotations.h",
],
}

api/test/metrics/chrome_perf_dashboard_metrics_exporter.cc
@@ -0,0 +1,146 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h"
#include <stdio.h>
#include <memory>
#include <string>
#include <vector>
#include "absl/memory/memory.h"
#include "absl/strings/string_view.h"
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "test/testsupport/file_utils.h"
#include "test/testsupport/perf_test_histogram_writer.h"
#include "test/testsupport/perf_test_result_writer.h"
namespace webrtc {
namespace test {
namespace {
std::string ToChromePerfDashboardUnit(Unit unit) {
switch (unit) {
case Unit::kMilliseconds:
return "msBestFitFormat";
case Unit::kPercent:
return "n%";
case Unit::kBytes:
return "sizeInBytes";
case Unit::kKilobitsPerSecond:
// Chrome Perf Dashboard doesn't have kbps units, so we change the unit
// and value accordingly.
return "bytesPerSecond";
case Unit::kHertz:
return "Hz";
case Unit::kUnitless:
return "unitless";
case Unit::kCount:
return "count";
}
}
double ToChromePerfDashboardValue(double value, Unit unit) {
switch (unit) {
case Unit::kKilobitsPerSecond:
// Chrome Perf Dashboard doesn't have kbps units, so we change the unit
// and value accordingly.
return value * 1000 / 8;
default:
return value;
}
}
ImproveDirection ToChromePerfDashboardImproveDirection(
ImprovementDirection direction) {
switch (direction) {
case ImprovementDirection::kBiggerIsBetter:
return ImproveDirection::kBiggerIsBetter;
case ImprovementDirection::kNeitherIsBetter:
return ImproveDirection::kNone;
case ImprovementDirection::kSmallerIsBetter:
return ImproveDirection::kSmallerIsBetter;
}
}
bool WriteMetricsToFile(const std::string& path, const std::string& data) {
CreateDir(DirName(path));
FILE* output = fopen(path.c_str(), "wb");
if (output == nullptr) {
printf("Failed to write to %s.\n", path.c_str());
return false;
}
size_t written = fwrite(data.c_str(), sizeof(char), data.size(), output);
fclose(output);
if (written != data.size()) {
size_t expected = data.size();
printf("Wrote %zu, tried to write %zu\n", written, expected);
return false;
}
return true;
}
bool IsEmpty(const Metric::Stats& stats) {
return !stats.mean.has_value() && !stats.stddev.has_value() &&
!stats.min.has_value() && !stats.max.has_value();
}
} // namespace
ChromePerfDashboardMetricsExporter::ChromePerfDashboardMetricsExporter(
absl::string_view export_file_path)
: export_file_path_(export_file_path) {}
bool ChromePerfDashboardMetricsExporter::Export(
rtc::ArrayView<const Metric> metrics) {
std::unique_ptr<PerfTestResultWriter> writer =
absl::WrapUnique<PerfTestResultWriter>(CreateHistogramWriter());
for (const Metric& metric : metrics) {
if (metric.time_series.samples.empty() && IsEmpty(metric.stats)) {
// If no data was collected for the metric, a value of 0 is expected to be
// exported, so add a single 0 sample.
writer->LogResult(
metric.name, metric.test_case,
ToChromePerfDashboardValue(0, metric.unit),
ToChromePerfDashboardUnit(metric.unit),
/*important=*/false,
ToChromePerfDashboardImproveDirection(metric.improvement_direction));
continue;
}
if (metric.time_series.samples.empty()) {
writer->LogResultMeanAndError(
metric.name, metric.test_case,
ToChromePerfDashboardValue(*metric.stats.mean, metric.unit),
ToChromePerfDashboardValue(*metric.stats.stddev, metric.unit),
ToChromePerfDashboardUnit(metric.unit),
/*important=*/false,
ToChromePerfDashboardImproveDirection(metric.improvement_direction));
continue;
}
std::vector<double> samples(metric.time_series.samples.size());
for (size_t i = 0; i < metric.time_series.samples.size(); ++i) {
samples[i] = ToChromePerfDashboardValue(
metric.time_series.samples[i].value, metric.unit);
}
writer->LogResultList(
metric.name, metric.test_case, samples,
ToChromePerfDashboardUnit(metric.unit),
/*important=*/false,
ToChromePerfDashboardImproveDirection(metric.improvement_direction));
}
return WriteMetricsToFile(export_file_path_, writer->Serialize());
}
} // namespace test
} // namespace webrtc

api/test/metrics/chrome_perf_dashboard_metrics_exporter.h
@@ -0,0 +1,41 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_
#define API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_
#include <string>
#include "absl/strings/string_view.h"
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_exporter.h"
namespace webrtc {
namespace test {
// Exports all collected metrics in the Chrome Perf Dashboard proto format.
class ChromePerfDashboardMetricsExporter : public MetricsExporter {
public:
// `export_file_path` - path where the proto file will be written.
explicit ChromePerfDashboardMetricsExporter(
absl::string_view export_file_path);
~ChromePerfDashboardMetricsExporter() override = default;
bool Export(rtc::ArrayView<const Metric> metrics) override;
private:
const std::string export_file_path_;
};
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_CHROME_PERF_DASHBOARD_METRICS_EXPORTER_H_
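
Usage sketch (not part of this commit): a hypothetical helper that drives the
exporter directly with a hand-built metric. The function name, output path,
and all values are illustrative.

#include <vector>

#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h"
#include "api/test/metrics/metric.h"

bool ExportOneMetric() {
  using namespace webrtc::test;  // Brevity only; avoid in real code.
  Metric metric{
      .name = "decode_time",
      .unit = Unit::kMilliseconds,
      .improvement_direction = ImprovementDirection::kSmallerIsBetter,
      .test_case = "MyTestCase",
      // Stats-only metric: `stddev` must be set, because the exporter
      // dereferences it when the time series has no samples.
      .stats = Metric::Stats{
          .mean = 42.0, .stddev = 1.5, .min = 40.0, .max = 44.0}};
  ChromePerfDashboardMetricsExporter exporter("/tmp/webrtc_metrics.pb");
  // A std::vector<Metric> converts implicitly to rtc::ArrayView<const Metric>.
  return exporter.Export(std::vector<Metric>{metric});
}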

api/test/metrics/chrome_perf_dashboard_metrics_exporter_test.cc
@@ -0,0 +1,248 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/chrome_perf_dashboard_metrics_exporter.h"
#include <fstream>
#include <map>
#include <vector>
#include "api/test/metrics/metric.h"
#include "api/units/timestamp.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/testsupport/file_utils.h"
#include "third_party/catapult/tracing/tracing/value/histogram.h"
namespace webrtc {
namespace test {
namespace {
using ::testing::DoubleNear;
using ::testing::Eq;
using ::testing::Test;
namespace proto = ::catapult::tracing::tracing::proto;
std::map<std::string, std::string> DefaultMetadata() {
return std::map<std::string, std::string>{{"key", "value"}};
}
Metric::TimeSeries::Sample Sample(double value) {
return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1),
.value = value,
.sample_metadata = DefaultMetadata()};
}
std::string ReadFileAsString(const std::string& filename) {
std::ifstream infile(filename, std::ios_base::binary);
auto buffer = std::vector<char>(std::istreambuf_iterator<char>(infile),
std::istreambuf_iterator<char>());
return std::string(buffer.begin(), buffer.end());
}
class ChromePerfDashboardMetricsExporterTest : public Test {
protected:
~ChromePerfDashboardMetricsExporterTest() override = default;
void SetUp() override {
temp_filename_ = webrtc::test::TempFilename(
webrtc::test::OutputPath(),
"chrome_perf_dashboard_metrics_exporter_test");
}
void TearDown() override {
ASSERT_TRUE(webrtc::test::RemoveFile(temp_filename_));
}
std::string temp_filename_;
};
TEST_F(ChromePerfDashboardMetricsExporterTest, ExportMetricFormatCorrect) {
Metric metric1{
.name = "test_metric1",
.unit = Unit::kMilliseconds,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name1",
.metric_metadata = DefaultMetadata(),
.time_series =
Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}},
.stats =
Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};
Metric metric2{
.name = "test_metric2",
.unit = Unit::kKilobitsPerSecond,
.improvement_direction = ImprovementDirection::kSmallerIsBetter,
.test_case = "test_case_name2",
.metric_metadata = DefaultMetadata(),
.time_series =
Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}},
.stats = Metric::Stats{
.mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}};
ChromePerfDashboardMetricsExporter exporter(temp_filename_);
ASSERT_TRUE(exporter.Export(std::vector<Metric>{metric1, metric2}));
proto::HistogramSet actual_histogram_set;
actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_));
EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(2));
// Validate output for `metric1`
EXPECT_THAT(actual_histogram_set.histograms(0).name(), Eq("test_metric1"));
EXPECT_THAT(actual_histogram_set.histograms(0).unit().unit(),
Eq(proto::Unit::MS_BEST_FIT_FORMAT));
EXPECT_THAT(actual_histogram_set.histograms(0).unit().improvement_direction(),
Eq(proto::ImprovementDirection::BIGGER_IS_BETTER));
EXPECT_THAT(
actual_histogram_set.histograms(0).diagnostics().diagnostic_map().size(),
Eq(1lu));
EXPECT_THAT(actual_histogram_set.histograms(0)
.diagnostics()
.diagnostic_map()
.at("stories")
.generic_set()
.values(0),
Eq("\"test_case_name1\""));
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(2));
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(10.0));
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(1), Eq(20.0));
EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(2));
EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), Eq(20));
EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(),
DoubleNear(2.64916, 0.1));
EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(15));
EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(10));
EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(30));
EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(50));
// Validate output for `metric2`
EXPECT_THAT(actual_histogram_set.histograms(1).name(), Eq("test_metric2"));
EXPECT_THAT(actual_histogram_set.histograms(1).unit().unit(),
Eq(proto::Unit::BYTES_PER_SECOND));
EXPECT_THAT(actual_histogram_set.histograms(1).unit().improvement_direction(),
Eq(proto::ImprovementDirection::SMALLER_IS_BETTER));
EXPECT_THAT(
actual_histogram_set.histograms(1).diagnostics().diagnostic_map().size(),
Eq(1lu));
EXPECT_THAT(actual_histogram_set.histograms(1)
.diagnostics()
.diagnostic_map()
.at("stories")
.generic_set()
.values(0),
Eq("\"test_case_name2\""));
EXPECT_THAT(actual_histogram_set.histograms(1).sample_values().size(), Eq(2));
EXPECT_THAT(actual_histogram_set.histograms(1).sample_values(0), Eq(2500.0));
EXPECT_THAT(actual_histogram_set.histograms(1).sample_values(1), Eq(5000.0));
EXPECT_THAT(actual_histogram_set.histograms(1).running().count(), Eq(2));
EXPECT_THAT(actual_histogram_set.histograms(1).running().max(), Eq(5000));
EXPECT_THAT(actual_histogram_set.histograms(1).running().meanlogs(),
DoubleNear(8.17062, 0.1));
EXPECT_THAT(actual_histogram_set.histograms(1).running().mean(), Eq(3750));
EXPECT_THAT(actual_histogram_set.histograms(1).running().min(), Eq(2500));
EXPECT_THAT(actual_histogram_set.histograms(1).running().sum(), Eq(7500));
EXPECT_THAT(actual_histogram_set.histograms(1).running().variance(),
Eq(3125000));
}
TEST_F(ChromePerfDashboardMetricsExporterTest,
ExportEmptyMetricExportsZeroValue) {
Metric metric{.name = "test_metric",
.unit = Unit::kMilliseconds,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = {}},
.stats = Metric::Stats{}};
ChromePerfDashboardMetricsExporter exporter(temp_filename_);
ASSERT_TRUE(exporter.Export(std::vector<Metric>{metric}));
proto::HistogramSet actual_histogram_set;
actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_));
EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(1));
// Validate values for `metric`
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(1));
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(0.0));
EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(1));
EXPECT_THAT(actual_histogram_set.histograms(0).running().max(),
DoubleNear(0, 1e-6));
EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(), Eq(0));
EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(0));
EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(0));
EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(0));
EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(0));
}
TEST_F(ChromePerfDashboardMetricsExporterTest,
ExportMetricWithOnlyStatsExportsMeanValues) {
Metric metric{.name = "test_metric",
.unit = Unit::kMilliseconds,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = {}},
.stats = Metric::Stats{
.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};
ChromePerfDashboardMetricsExporter exporter(temp_filename_);
ASSERT_TRUE(exporter.Export(std::vector<Metric>{metric}));
proto::HistogramSet actual_histogram_set;
actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_));
EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(1));
// Validate values for `metric`
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(1));
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(15.0));
EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(1));
EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), Eq(15));
EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(),
DoubleNear(2.70805, 0.1));
EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(15));
EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(15));
EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(15));
EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(0));
}
TEST_F(ChromePerfDashboardMetricsExporterTest,
ExportMetricWithOnlyStatsConvertsMeanValuesWhenRequired) {
Metric metric{.name = "test_metric",
.unit = Unit::kKilobitsPerSecond,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = {}},
.stats = Metric::Stats{
.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};
ChromePerfDashboardMetricsExporter exporter(temp_filename_);
ASSERT_TRUE(exporter.Export(std::vector<Metric>{metric}));
proto::HistogramSet actual_histogram_set;
actual_histogram_set.ParseFromString(ReadFileAsString(temp_filename_));
EXPECT_THAT(actual_histogram_set.histograms().size(), Eq(1));
// Validate values for `metric`
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values().size(), Eq(1));
EXPECT_THAT(actual_histogram_set.histograms(0).sample_values(0), Eq(1875.0));
EXPECT_THAT(actual_histogram_set.histograms(0).running().count(), Eq(1));
EXPECT_THAT(actual_histogram_set.histograms(0).running().max(), Eq(1875));
EXPECT_THAT(actual_histogram_set.histograms(0).running().meanlogs(),
DoubleNear(7.53636, 0.1));
EXPECT_THAT(actual_histogram_set.histograms(0).running().mean(), Eq(1875));
EXPECT_THAT(actual_histogram_set.histograms(0).running().min(), Eq(1875));
EXPECT_THAT(actual_histogram_set.histograms(0).running().sum(), Eq(1875));
EXPECT_THAT(actual_histogram_set.histograms(0).running().variance(), Eq(0));
}
} // namespace
} // namespace test
} // namespace webrtc

api/test/metrics/global_metrics_logger_and_exporter.cc
@@ -0,0 +1,42 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/global_metrics_logger_and_exporter.h"
#include <memory>
#include <utility>
#include <vector>
#include "api/test/metrics/metrics_exporter.h"
#include "api/test/metrics/metrics_logger.h"
#include "rtc_base/checks.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
namespace test {
DefaultMetricsLogger* GetGlobalMetricsLogger() {
static DefaultMetricsLogger* logger_ =
new DefaultMetricsLogger(Clock::GetRealTimeClock());
return logger_;
}
bool ExportPerfMetric(MetricsLogger& logger,
std::vector<std::unique_ptr<MetricsExporter>> exporters) {
std::vector<Metric> metrics = logger.GetCollectedMetrics();
bool success = true;
for (auto& exporter : exporters) {
bool export_result = exporter->Export(metrics);
success = success && export_result;
}
return success;
}
} // namespace test
} // namespace webrtc

api/test/metrics/global_metrics_logger_and_exporter.h
@@ -0,0 +1,32 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_GLOBAL_METRICS_LOGGER_AND_EXPORTER_H_
#define API_TEST_METRICS_GLOBAL_METRICS_LOGGER_AND_EXPORTER_H_
#include <memory>
#include <vector>
#include "api/test/metrics/metrics_exporter.h"
#include "api/test/metrics/metrics_logger.h"
namespace webrtc {
namespace test {
// Returns the non-null global `MetricsLogger` used to log metrics.
DefaultMetricsLogger* GetGlobalMetricsLogger();
bool ExportPerfMetric(MetricsLogger& logger,
std::vector<std::unique_ptr<MetricsExporter>> exporters);
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_GLOBAL_METRICS_LOGGER_AND_EXPORTER_H_
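
For orientation, a minimal sketch of the intended log-then-export flow. The
helper name and metric are invented, and it assumes `StdoutMetricsExporter`
(declared in this commit's BUILD.gn) is default-constructible, which the
commit does not show.

#include <memory>
#include <utility>
#include <vector>

#include "api/test/metrics/global_metrics_logger_and_exporter.h"
#include "api/test/metrics/metrics_exporter.h"
#include "api/test/metrics/stdout_metrics_exporter.h"

bool LogAndExport() {
  using namespace webrtc::test;  // Brevity only.
  // Log into the process-wide logger...
  GetGlobalMetricsLogger()->LogSingleValueMetric(
      "psnr", "MyTestCase", /*value=*/36.5, Unit::kUnitless,
      ImprovementDirection::kBiggerIsBetter);
  // ...then hand the collected metrics to one or more exporters.
  std::vector<std::unique_ptr<MetricsExporter>> exporters;
  exporters.push_back(std::make_unique<StdoutMetricsExporter>());
  return ExportPerfMetric(*GetGlobalMetricsLogger(), std::move(exporters));
}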

api/test/metrics/global_metrics_logger_and_exporter_test.cc
@@ -0,0 +1,131 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/global_metrics_logger_and_exporter.h"
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/types/optional.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_exporter.h"
#include "api/test/metrics/metrics_logger.h"
#include "system_wrappers/include/clock.h"
#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace test {
namespace {
using ::testing::Eq;
using ::testing::IsEmpty;
std::map<std::string, std::string> DefaultMetadata() {
return std::map<std::string, std::string>{{"key", "value"}};
}
struct TestMetricsExporterFactory {
public:
std::unique_ptr<MetricsExporter> CreateExporter() {
return std::make_unique<TestMetricsExporter>(this, /*export_result=*/true);
}
std::unique_ptr<MetricsExporter> CreateFailureExporter() {
return std::make_unique<TestMetricsExporter>(this, /*export_result=*/false);
}
std::vector<Metric> exported_metrics;
private:
class TestMetricsExporter : public MetricsExporter {
public:
TestMetricsExporter(TestMetricsExporterFactory* factory, bool export_result)
: factory_(factory), export_result_(export_result) {}
~TestMetricsExporter() override = default;
bool Export(rtc::ArrayView<const Metric> metrics) override {
factory_->exported_metrics =
std::vector<Metric>(metrics.begin(), metrics.end());
return export_result_;
}
TestMetricsExporterFactory* factory_;
bool export_result_;
};
};
TEST(ExportPerfMetricTest, CollectedMetricsAreExported) {
TestMetricsExporterFactory exporter_factory;
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
logger.LogSingleValueMetric(
"metric_name", "test_case_name",
/*value=*/10, Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
std::map<std::string, std::string>{{"key", "value"}});
std::vector<std::unique_ptr<MetricsExporter>> exporters;
exporters.push_back(exporter_factory.CreateExporter());
ASSERT_TRUE(ExportPerfMetric(logger, std::move(exporters)));
std::vector<Metric> metrics = exporter_factory.exported_metrics;
ASSERT_THAT(metrics.size(), Eq(1lu));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metric.metric_metadata,
Eq(std::map<std::string, std::string>{{"key", "value"}}));
ASSERT_THAT(metric.time_series.samples.size(), Eq(1lu));
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metric.time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{}));
ASSERT_THAT(metric.stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.stddev, absl::nullopt);
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(10.0));
}
TEST(ExportPerfMetricTest, OneFailedExporterDoesNotPreventExportToOthers) {
TestMetricsExporterFactory exporter_factory1;
TestMetricsExporterFactory exporter_factory2;
TestMetricsExporterFactory exporter_factory3;
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
logger.LogSingleValueMetric("metric_name", "test_case_name",
/*value=*/10, Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
std::vector<std::unique_ptr<MetricsExporter>> exporters;
exporters.push_back(exporter_factory1.CreateExporter());
exporters.push_back(exporter_factory2.CreateFailureExporter());
exporters.push_back(exporter_factory3.CreateExporter());
ASSERT_FALSE(ExportPerfMetric(logger, std::move(exporters)));
std::vector<Metric> metrics1 = exporter_factory1.exported_metrics;
std::vector<Metric> metrics2 = exporter_factory2.exported_metrics;
std::vector<Metric> metrics3 = exporter_factory3.exported_metrics;
ASSERT_THAT(metrics1.size(), Eq(1lu));
EXPECT_THAT(metrics1[0].name, Eq("metric_name"));
ASSERT_THAT(metrics2.size(), Eq(1lu));
EXPECT_THAT(metrics2[0].name, Eq("metric_name"));
ASSERT_THAT(metrics3.size(), Eq(1lu));
EXPECT_THAT(metrics3[0].name, Eq("metric_name"));
}
} // namespace
} // namespace test
} // namespace webrtc

api/test/metrics/metric.cc
@@ -0,0 +1,48 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/metric.h"
#include <string>
namespace webrtc {
namespace test {
absl::string_view ToString(Unit unit) {
switch (unit) {
case Unit::kMilliseconds:
return "Milliseconds";
case Unit::kPercent:
return "Percent";
case Unit::kBytes:
return "Bytes";
case Unit::kKilobitsPerSecond:
return "KilobitsPerSecond";
case Unit::kHertz:
return "Hertz";
case Unit::kUnitless:
return "Unitless";
case Unit::kCount:
return "Count";
}
}
absl::string_view ToString(ImprovementDirection direction) {
switch (direction) {
case ImprovementDirection::kBiggerIsBetter:
return "BiggerIsBetter";
case ImprovementDirection::kNeitherIsBetter:
return "NeitherIsBetter";
case ImprovementDirection::kSmallerIsBetter:
return "SmallerIsBetter";
}
}
} // namespace test
} // namespace webrtc

api/test/metrics/metric.h
@@ -0,0 +1,96 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_METRIC_H_
#define API_TEST_METRICS_METRIC_H_
#include <map>
#include <string>
#include <vector>
#include "absl/types/optional.h"
#include "api/units/timestamp.h"
namespace webrtc {
namespace test {
enum class Unit {
kMilliseconds,
kPercent,
kBytes,
kKilobitsPerSecond,
kHertz,
// General unitless value. Can be used either for dimensionless quantities
// (e.g. a ratio) or for units that are not present in this enum and too
// specific to add.
kUnitless,
kCount
};
absl::string_view ToString(Unit unit);
enum class ImprovementDirection {
kBiggerIsBetter,
kNeitherIsBetter,
kSmallerIsBetter
};
absl::string_view ToString(ImprovementDirection direction);
struct Metric {
struct TimeSeries {
struct Sample {
// Timestamp in microseconds associated with a sample. For example,
// the timestamp when the sample was collected.
webrtc::Timestamp timestamp;
double value;
// Metadata associated with this particular sample.
std::map<std::string, std::string> sample_metadata;
};
// All samples collected for this metric. It can be empty if the Metric
// object only contains `stats`.
std::vector<Sample> samples;
};
// Contains the metric's statistics precomputed from `time_series`, or, if
// `time_series` is omitted (has 0 samples), statistics provided directly by
// the metric's calculator.
struct Stats {
// Sample mean of the metric
// (https://en.wikipedia.org/wiki/Sample_mean_and_covariance).
absl::optional<double> mean;
// Standard deviation (https://en.wikipedia.org/wiki/Standard_deviation).
// Undefined if `time_series` contains only a single value.
absl::optional<double> stddev;
absl::optional<double> min;
absl::optional<double> max;
};
// Metric name, for example PSNR, SSIM, decode_time, etc.
std::string name;
Unit unit;
ImprovementDirection improvement_direction;
// If the metric was generated by a test, this field names that test case.
std::string test_case;
// Metadata associated with the whole metric.
std::map<std::string, std::string> metric_metadata;
// Contains all samples of the metric collected during test execution.
// It can be empty if the user only stores precomputed statistics into
// `stats`.
TimeSeries time_series;
Stats stats;
};
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_METRIC_H_
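
A hand-assembled example of the struct above (helper name, values, and
metadata invented for illustration):

#include "api/test/metrics/metric.h"
#include "api/units/timestamp.h"

webrtc::test::Metric MakeExampleMetric() {
  using namespace webrtc::test;  // Brevity only.
  return Metric{
      .name = "encode_time",
      .unit = Unit::kMilliseconds,
      .improvement_direction = ImprovementDirection::kSmallerIsBetter,
      .test_case = "MyTestCase",
      .metric_metadata = {{"codec", "VP8"}},  // Invented metadata.
      .time_series =
          Metric::TimeSeries{.samples = {Metric::TimeSeries::Sample{
              .timestamp = webrtc::Timestamp::Seconds(1), .value = 10.0}}},
      // `stddev` left unset: it is undefined for a single sample.
      .stats = Metric::Stats{.mean = 10.0, .min = 10.0, .max = 10.0}};
}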

api/test/metrics/metrics_accumulator.cc
@@ -0,0 +1,132 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/metrics_accumulator.h"
#include <map>
#include <string>
#include <utility>
#include <vector>
#include "absl/strings/string_view.h"
#include "api/numerics/samples_stats_counter.h"
#include "api/test/metrics/metric.h"
#include "api/units/timestamp.h"
#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
namespace test {
namespace {
Metric::Stats ToStats(const SamplesStatsCounter& values) {
if (values.IsEmpty()) {
return Metric::Stats();
}
return Metric::Stats{.mean = values.GetAverage(),
.stddev = values.GetStandardDeviation(),
.min = values.GetMin(),
.max = values.GetMax()};
}
Metric SetTimeseries(const Metric& prototype,
const SamplesStatsCounter& counter) {
Metric output(prototype);
Metric::TimeSeries time_series;
for (const SamplesStatsCounter::StatsSample& sample :
counter.GetTimedSamples()) {
time_series.samples.push_back(
Metric::TimeSeries::Sample{.timestamp = sample.time,
.value = sample.value,
.sample_metadata = sample.metadata});
}
output.time_series = std::move(time_series);
output.stats = ToStats(counter);
return output;
}
} // namespace
bool operator<(const MetricsAccumulator::MetricKey& a,
const MetricsAccumulator::MetricKey& b) {
if (a.test_case_name < b.test_case_name) {
return true;
} else if (a.test_case_name > b.test_case_name) {
return false;
} else {
return a.metric_name < b.metric_name;
}
}
bool MetricsAccumulator::AddSample(
absl::string_view metric_name,
absl::string_view test_case_name,
double value,
Timestamp timestamp,
std::map<std::string, std::string> point_metadata) {
MutexLock lock(&mutex_);
bool created;
MetricValue* metric_value =
GetOrCreateMetric(metric_name, test_case_name, &created);
metric_value->counter.AddSample(
SamplesStatsCounter::StatsSample{.value = value,
.time = timestamp,
.metadata = std::move(point_metadata)});
return created;
}
bool MetricsAccumulator::AddMetricMetadata(
absl::string_view metric_name,
absl::string_view test_case_name,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metric_metadata) {
MutexLock lock(&mutex_);
bool created;
MetricValue* metric_value =
GetOrCreateMetric(metric_name, test_case_name, &created);
metric_value->metric.unit = unit;
metric_value->metric.improvement_direction = improvement_direction;
metric_value->metric.metric_metadata = std::move(metric_metadata);
return created;
}
std::vector<Metric> MetricsAccumulator::GetCollectedMetrics() const {
MutexLock lock(&mutex_);
std::vector<Metric> out;
out.reserve(metrics_.size());
for (const auto& [unused_key, metric_value] : metrics_) {
out.push_back(SetTimeseries(metric_value.metric, metric_value.counter));
}
return out;
}
MetricsAccumulator::MetricValue* MetricsAccumulator::GetOrCreateMetric(
absl::string_view metric_name,
absl::string_view test_case_name,
bool* created) {
MetricKey key(metric_name, test_case_name);
auto it = metrics_.find(key);
if (it != metrics_.end()) {
*created = false;
return &it->second;
}
*created = true;
Metric metric{
.name = key.metric_name,
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kNeitherIsBetter,
.test_case = key.test_case_name,
};
return &metrics_.emplace(key, MetricValue{.metric = std::move(metric)})
.first->second;
}
} // namespace test
} // namespace webrtc

api/test/metrics/metrics_accumulator.h
@@ -0,0 +1,99 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_METRICS_ACCUMULATOR_H_
#define API_TEST_METRICS_METRICS_ACCUMULATOR_H_
#include <map>
#include <string>
#include <vector>
#include "absl/strings/string_view.h"
#include "api/numerics/samples_stats_counter.h"
#include "api/test/metrics/metric.h"
#include "api/units/timestamp.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
namespace test {
// Accumulates metric samples internally and provides an API to retrieve the
// collected metrics.
//
// This object is thread safe.
class MetricsAccumulator {
public:
MetricsAccumulator() = default;
// Adds a sample for the specified `metric_name` within the specified
// `test_case_name`. The first time a given combination of `metric_name`
// and `test_case_name` is seen, a new Metric is created to collect samples;
// afterwards, samples are added to the previously created Metric.
//
// By default the metric uses `Unit::kUnitless` and
// `ImprovementDirection::kNeitherIsBetter`.
//
// `point_metadata` - metadata to attach to the single data point that this
// method adds to the Metric (it is not metric-global metadata).
//
// Returns true if a new metric was created, false otherwise.
bool AddSample(absl::string_view metric_name,
absl::string_view test_case_name,
double value,
Timestamp timestamp,
std::map<std::string, std::string> point_metadata = {});
// Adds metadata to the metric specified by `metric_name` within the
// specified `test_case_name`. If no such metric exists, creates a new one;
// otherwise overwrites the previously recorded values.
//
// Returns true if a new metric was created, false otherwise.
bool AddMetricMetadata(
absl::string_view metric_name,
absl::string_view test_case_name,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metric_metadata = {});
// Returns all metrics collected by this accumulator. No ordering guarantees
// are provided.
std::vector<Metric> GetCollectedMetrics() const;
private:
struct MetricKey {
MetricKey(absl::string_view metric_name, absl::string_view test_case_name)
: metric_name(metric_name), test_case_name(test_case_name) {}
std::string metric_name;
std::string test_case_name;
};
friend bool operator<(const MetricKey& a, const MetricKey& b);
struct MetricValue {
SamplesStatsCounter counter;
Metric metric;
};
// Gets the existing metric or creates a new one. If a metric was created,
// `created` is set to true.
MetricValue* GetOrCreateMetric(absl::string_view metric_name,
absl::string_view test_case_name,
bool* created)
RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_);
mutable Mutex mutex_;
std::map<MetricKey, MetricValue> metrics_ RTC_GUARDED_BY(mutex_);
};
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_METRICS_ACCUMULATOR_H_
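
Sketch of the accumulator contract described above (function name, metric
names, and values are illustrative):

#include <vector>

#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_accumulator.h"
#include "api/units/timestamp.h"

std::vector<webrtc::test::Metric> CollectExample() {
  using namespace webrtc::test;  // Brevity only.
  MetricsAccumulator accumulator;
  // The first call creates the metric (returns true); the second returns
  // false and appends to the same time series.
  accumulator.AddSample("bitrate", "MyTestCase", /*value=*/300.0,
                        webrtc::Timestamp::Seconds(1));
  accumulator.AddSample("bitrate", "MyTestCase", /*value=*/320.0,
                        webrtc::Timestamp::Seconds(2));
  // Until this call the metric keeps the defaults kUnitless and
  // kNeitherIsBetter.
  accumulator.AddMetricMetadata("bitrate", "MyTestCase",
                                Unit::kKilobitsPerSecond,
                                ImprovementDirection::kBiggerIsBetter);
  return accumulator.GetCollectedMetrics();
}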

api/test/metrics/metrics_accumulator_test.cc
@@ -0,0 +1,315 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/metrics_accumulator.h"
#include <map>
#include <vector>
#include "api/test/metrics/metric.h"
#include "api/units/timestamp.h"
#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace test {
namespace {
using ::testing::Eq;
using ::testing::IsEmpty;
using ::testing::SizeIs;
TEST(MetricsAccumulatorTest, AddSampleToTheNewMetricWillCreateOne) {
MetricsAccumulator accumulator;
ASSERT_TRUE(accumulator.AddSample(
"metric_name", "test_case_name",
/*value=*/10, Timestamp::Seconds(1),
/*point_metadata=*/std::map<std::string, std::string>{{"key", "value"}}));
std::vector<Metric> metrics = accumulator.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kUnitless));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kNeitherIsBetter));
EXPECT_THAT(metric.metric_metadata, IsEmpty());
ASSERT_THAT(metric.time_series.samples, SizeIs(1));
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metric.time_series.samples[0].timestamp,
Eq(Timestamp::Seconds(1)));
EXPECT_THAT(metric.time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"key", "value"}}));
ASSERT_THAT(metric.stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.stddev, absl::optional<double>(0.0));
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(10.0));
}
TEST(MetricsAccumulatorTest, AddSamplesToExistingMetricWontCreateNewOne) {
MetricsAccumulator accumulator;
ASSERT_TRUE(accumulator.AddSample(
"metric_name", "test_case_name",
/*value=*/10, Timestamp::Seconds(1),
/*point_metadata=*/
std::map<std::string, std::string>{{"key1", "value1"}}));
ASSERT_FALSE(accumulator.AddSample(
"metric_name", "test_case_name",
/*value=*/20, Timestamp::Seconds(2),
/*point_metadata=*/
std::map<std::string, std::string>{{"key2", "value2"}}));
std::vector<Metric> metrics = accumulator.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kUnitless));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kNeitherIsBetter));
EXPECT_THAT(metric.metric_metadata, IsEmpty());
ASSERT_THAT(metric.time_series.samples, SizeIs(2));
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metric.time_series.samples[0].timestamp,
Eq(Timestamp::Seconds(1)));
EXPECT_THAT(metric.time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"key1", "value1"}}));
EXPECT_THAT(metric.time_series.samples[1].value, Eq(20.0));
EXPECT_THAT(metric.time_series.samples[1].timestamp,
Eq(Timestamp::Seconds(2)));
EXPECT_THAT(metric.time_series.samples[1].sample_metadata,
Eq(std::map<std::string, std::string>{{"key2", "value2"}}));
ASSERT_THAT(metric.stats.mean, absl::optional<double>(15.0));
ASSERT_THAT(metric.stats.stddev, absl::optional<double>(5.0));
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(20.0));
}
TEST(MetricsAccumulatorTest, AddSampleToDifferentMetricsWillCreateBoth) {
MetricsAccumulator accumulator;
ASSERT_TRUE(accumulator.AddSample(
"metric_name1", "test_case_name1",
/*value=*/10, Timestamp::Seconds(1),
/*point_metadata=*/
std::map<std::string, std::string>{{"key1", "value1"}}));
ASSERT_TRUE(accumulator.AddSample(
"metric_name2", "test_case_name2",
/*value=*/20, Timestamp::Seconds(2),
/*point_metadata=*/
std::map<std::string, std::string>{{"key2", "value2"}}));
std::vector<Metric> metrics = accumulator.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(2));
EXPECT_THAT(metrics[0].name, Eq("metric_name1"));
EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1"));
EXPECT_THAT(metrics[0].unit, Eq(Unit::kUnitless));
EXPECT_THAT(metrics[0].improvement_direction,
Eq(ImprovementDirection::kNeitherIsBetter));
EXPECT_THAT(metrics[0].metric_metadata, IsEmpty());
ASSERT_THAT(metrics[0].time_series.samples, SizeIs(1));
EXPECT_THAT(metrics[0].time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metrics[0].time_series.samples[0].timestamp,
Eq(Timestamp::Seconds(1)));
EXPECT_THAT(metrics[0].time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"key1", "value1"}}));
ASSERT_THAT(metrics[0].stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metrics[0].stats.stddev, absl::optional<double>(0.0));
ASSERT_THAT(metrics[0].stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metrics[0].stats.max, absl::optional<double>(10.0));
EXPECT_THAT(metrics[1].name, Eq("metric_name2"));
EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2"));
EXPECT_THAT(metrics[1].unit, Eq(Unit::kUnitless));
EXPECT_THAT(metrics[1].improvement_direction,
Eq(ImprovementDirection::kNeitherIsBetter));
EXPECT_THAT(metrics[1].metric_metadata, IsEmpty());
ASSERT_THAT(metrics[1].time_series.samples, SizeIs(1));
EXPECT_THAT(metrics[1].time_series.samples[0].value, Eq(20.0));
EXPECT_THAT(metrics[1].time_series.samples[0].timestamp,
Eq(Timestamp::Seconds(2)));
EXPECT_THAT(metrics[1].time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"key2", "value2"}}));
ASSERT_THAT(metrics[1].stats.mean, absl::optional<double>(20.0));
ASSERT_THAT(metrics[1].stats.stddev, absl::optional<double>(0.0));
ASSERT_THAT(metrics[1].stats.min, absl::optional<double>(20.0));
ASSERT_THAT(metrics[1].stats.max, absl::optional<double>(20.0));
}
TEST(MetricsAccumulatorTest, AddMetadataToTheNewMetricWillCreateOne) {
MetricsAccumulator accumulator;
ASSERT_TRUE(accumulator.AddMetricMetadata(
"metric_name", "test_case_name", Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
/*metric_metadata=*/
std::map<std::string, std::string>{{"key", "value"}}));
std::vector<Metric> metrics = accumulator.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metric.metric_metadata,
Eq(std::map<std::string, std::string>{{"key", "value"}}));
ASSERT_THAT(metric.time_series.samples, IsEmpty());
ASSERT_THAT(metric.stats.mean, absl::nullopt);
ASSERT_THAT(metric.stats.stddev, absl::nullopt);
ASSERT_THAT(metric.stats.min, absl::nullopt);
ASSERT_THAT(metric.stats.max, absl::nullopt);
}
TEST(MetricsAccumulatorTest,
AddMetadataToTheExistingMetricWillOverwriteValues) {
MetricsAccumulator accumulator;
ASSERT_TRUE(accumulator.AddMetricMetadata(
"metric_name", "test_case_name", Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
/*metric_metadata=*/
std::map<std::string, std::string>{{"key1", "value1"}}));
ASSERT_FALSE(accumulator.AddMetricMetadata(
"metric_name", "test_case_name", Unit::kBytes,
ImprovementDirection::kSmallerIsBetter,
/*metric_metadata=*/
std::map<std::string, std::string>{{"key2", "value2"}}));
std::vector<Metric> metrics = accumulator.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kBytes));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kSmallerIsBetter));
EXPECT_THAT(metric.metric_metadata,
Eq(std::map<std::string, std::string>{{"key2", "value2"}}));
ASSERT_THAT(metric.time_series.samples, IsEmpty());
ASSERT_THAT(metric.stats.mean, absl::nullopt);
ASSERT_THAT(metric.stats.stddev, absl::nullopt);
ASSERT_THAT(metric.stats.min, absl::nullopt);
ASSERT_THAT(metric.stats.max, absl::nullopt);
}
TEST(MetricsAccumulatorTest, AddMetadataToDifferentMetricsWillCreateBoth) {
MetricsAccumulator accumulator;
ASSERT_TRUE(accumulator.AddMetricMetadata(
"metric_name1", "test_case_name1", Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
/*metric_metadata=*/
std::map<std::string, std::string>{{"key1", "value1"}}));
ASSERT_TRUE(accumulator.AddMetricMetadata(
"metric_name2", "test_case_name2", Unit::kBytes,
ImprovementDirection::kSmallerIsBetter,
/*metric_metadata=*/
std::map<std::string, std::string>{{"key2", "value2"}}));
std::vector<Metric> metrics = accumulator.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(2));
EXPECT_THAT(metrics[0].name, Eq("metric_name1"));
EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1"));
EXPECT_THAT(metrics[0].unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metrics[0].improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metrics[0].metric_metadata,
Eq(std::map<std::string, std::string>{{"key1", "value1"}}));
ASSERT_THAT(metrics[0].time_series.samples, IsEmpty());
ASSERT_THAT(metrics[0].stats.mean, absl::nullopt);
ASSERT_THAT(metrics[0].stats.stddev, absl::nullopt);
ASSERT_THAT(metrics[0].stats.min, absl::nullopt);
ASSERT_THAT(metrics[0].stats.max, absl::nullopt);
EXPECT_THAT(metrics[1].name, Eq("metric_name2"));
EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2"));
EXPECT_THAT(metrics[1].unit, Eq(Unit::kBytes));
EXPECT_THAT(metrics[1].improvement_direction,
Eq(ImprovementDirection::kSmallerIsBetter));
EXPECT_THAT(metrics[1].metric_metadata,
Eq(std::map<std::string, std::string>{{"key2", "value2"}}));
ASSERT_THAT(metrics[1].time_series.samples, IsEmpty());
ASSERT_THAT(metrics[1].stats.mean, absl::nullopt);
ASSERT_THAT(metrics[1].stats.stddev, absl::nullopt);
ASSERT_THAT(metrics[1].stats.min, absl::nullopt);
ASSERT_THAT(metrics[1].stats.max, absl::nullopt);
}
TEST(MetricsAccumulatorTest, AddMetadataAfterAddingSampleWontCreateNewMetric) {
MetricsAccumulator accumulator;
ASSERT_TRUE(accumulator.AddSample(
"metric_name", "test_case_name",
/*value=*/10, Timestamp::Seconds(1),
/*point_metadata=*/
std::map<std::string, std::string>{{"key_s", "value_s"}}));
ASSERT_FALSE(accumulator.AddMetricMetadata(
"metric_name", "test_case_name", Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
/*metric_metadata=*/
std::map<std::string, std::string>{{"key_m", "value_m"}}));
std::vector<Metric> metrics = accumulator.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metric.metric_metadata,
Eq(std::map<std::string, std::string>{{"key_m", "value_m"}}));
ASSERT_THAT(metric.time_series.samples, SizeIs(1));
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metric.time_series.samples[0].timestamp,
Eq(Timestamp::Seconds(1)));
EXPECT_THAT(metric.time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"key_s", "value_s"}}));
ASSERT_THAT(metric.stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.stddev, absl::optional<double>(0.0));
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(10.0));
}
TEST(MetricsAccumulatorTest, AddSampleAfterAddingMetadataWontCreateNewMetric) {
MetricsAccumulator accumulator;
ASSERT_TRUE(accumulator.AddMetricMetadata(
"metric_name", "test_case_name", Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
/*metric_metadata=*/
std::map<std::string, std::string>{{"key_m", "value_m"}}));
ASSERT_FALSE(accumulator.AddSample(
"metric_name", "test_case_name",
/*value=*/10, Timestamp::Seconds(1),
/*point_metadata=*/
std::map<std::string, std::string>{{"key_s", "value_s"}}));
std::vector<Metric> metrics = accumulator.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metric.metric_metadata,
Eq(std::map<std::string, std::string>{{"key_m", "value_m"}}));
ASSERT_THAT(metric.time_series.samples, SizeIs(1));
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metric.time_series.samples[0].timestamp,
Eq(Timestamp::Seconds(1)));
EXPECT_THAT(metric.time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"key_s", "value_s"}}));
ASSERT_THAT(metric.stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.stddev, absl::optional<double>(0.0));
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(10.0));
}
} // namespace
} // namespace test
} // namespace webrtc

api/test/metrics/metrics_exporter.h
@@ -0,0 +1,33 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_METRICS_EXPORTER_H_
#define API_TEST_METRICS_METRICS_EXPORTER_H_
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
namespace webrtc {
namespace test {
// Exports metrics in the requested format.
class MetricsExporter {
public:
virtual ~MetricsExporter() = default;
// Exports specified metrics in a format that depends on the implementation.
// Returns true if export succeeded, false otherwise.
virtual bool Export(rtc::ArrayView<const Metric> metrics) = 0;
};
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_METRICS_EXPORTER_H_
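
As an illustration of the contract above (not part of this commit), a trivial
hypothetical exporter that prints one line per metric:

#include <cstdio>

#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_exporter.h"

class OneLineExporter : public webrtc::test::MetricsExporter {
 public:
  bool Export(rtc::ArrayView<const webrtc::test::Metric> metrics) override {
    for (const webrtc::test::Metric& metric : metrics) {
      std::printf("%s / %s: %zu sample(s)\n", metric.test_case.c_str(),
                  metric.name.c_str(), metric.time_series.samples.size());
    }
    return true;  // Report success so other exporters still run.
  }
};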

api/test/metrics/metrics_logger.cc
@@ -0,0 +1,114 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/metrics_logger.h"
#include <map>
#include <string>
#include <utility>
#include <vector>
#include "absl/strings/string_view.h"
#include "api/numerics/samples_stats_counter.h"
#include "api/test/metrics/metric.h"
#include "rtc_base/synchronization/mutex.h"
namespace webrtc {
namespace test {
namespace {
Metric::Stats ToStats(const SamplesStatsCounter& values) {
if (values.IsEmpty()) {
return Metric::Stats();
}
return Metric::Stats{.mean = values.GetAverage(),
.stddev = values.GetStandardDeviation(),
.min = values.GetMin(),
.max = values.GetMax()};
}
} // namespace
void DefaultMetricsLogger::LogSingleValueMetric(
absl::string_view name,
absl::string_view test_case_name,
double value,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata) {
MutexLock lock(&mutex_);
metrics_.push_back(Metric{
.name = std::string(name),
.unit = unit,
.improvement_direction = improvement_direction,
.test_case = std::string(test_case_name),
.metric_metadata = std::move(metadata),
.time_series =
Metric::TimeSeries{.samples = std::vector{Metric::TimeSeries::Sample{
.timestamp = Now(), .value = value}}},
.stats = Metric::Stats{
.mean = value, .stddev = absl::nullopt, .min = value, .max = value}});
}
void DefaultMetricsLogger::LogMetric(
absl::string_view name,
absl::string_view test_case_name,
const SamplesStatsCounter& values,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata) {
MutexLock lock(&mutex_);
Metric::TimeSeries time_series;
for (const SamplesStatsCounter::StatsSample& sample :
values.GetTimedSamples()) {
time_series.samples.push_back(
Metric::TimeSeries::Sample{.timestamp = sample.time,
.value = sample.value,
.sample_metadata = sample.metadata});
}
metrics_.push_back(Metric{.name = std::string(name),
.unit = unit,
.improvement_direction = improvement_direction,
.test_case = std::string(test_case_name),
.metric_metadata = std::move(metadata),
.time_series = std::move(time_series),
.stats = ToStats(values)});
}
void DefaultMetricsLogger::LogMetric(
absl::string_view name,
absl::string_view test_case_name,
const Metric::Stats& metric_stats,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata) {
MutexLock lock(&mutex_);
metrics_.push_back(Metric{.name = std::string(name),
.unit = unit,
.improvement_direction = improvement_direction,
.test_case = std::string(test_case_name),
.metric_metadata = std::move(metadata),
.time_series = Metric::TimeSeries{.samples = {}},
                            .stats = metric_stats});
}
std::vector<Metric> DefaultMetricsLogger::GetCollectedMetrics() const {
std::vector<Metric> out = metrics_accumulator_.GetCollectedMetrics();
MutexLock lock(&mutex_);
out.insert(out.end(), metrics_.begin(), metrics_.end());
return out;
}
Timestamp DefaultMetricsLogger::Now() {
return clock_->CurrentTime();
}
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,112 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_METRICS_LOGGER_H_
#define API_TEST_METRICS_METRICS_LOGGER_H_
#include <map>
#include <string>
#include <utility>
#include <vector>
#include "absl/strings/string_view.h"
#include "api/numerics/samples_stats_counter.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_accumulator.h"
#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
namespace test {
// Provides an API to log and collect performance metrics.
class MetricsLogger {
public:
virtual ~MetricsLogger() = default;
// Adds a metric with a single value.
  // `metadata` - metric-level metadata to add.
virtual void LogSingleValueMetric(
absl::string_view name,
absl::string_view test_case_name,
double value,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata = {}) = 0;
  // Adds a metric with a time series created from the provided `values`.
  // `metadata` - metric-level metadata to add.
virtual void LogMetric(absl::string_view name,
absl::string_view test_case_name,
const SamplesStatsCounter& values,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata = {}) = 0;
  // Adds a metric that carries only a stats object, without the actually
  // collected time series values.
  // `metadata` - metric-level metadata to add.
virtual void LogMetric(absl::string_view name,
absl::string_view test_case_name,
const Metric::Stats& metric_stats,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata = {}) = 0;
// Returns all metrics collected by this logger.
virtual std::vector<Metric> GetCollectedMetrics() const = 0;
};
class DefaultMetricsLogger : public MetricsLogger {
public:
explicit DefaultMetricsLogger(webrtc::Clock* clock) : clock_(clock) {}
~DefaultMetricsLogger() override = default;
void LogSingleValueMetric(
absl::string_view name,
absl::string_view test_case_name,
double value,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata = {}) override;
void LogMetric(absl::string_view name,
absl::string_view test_case_name,
const SamplesStatsCounter& values,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata = {}) override;
void LogMetric(absl::string_view name,
absl::string_view test_case_name,
const Metric::Stats& metric_stats,
Unit unit,
ImprovementDirection improvement_direction,
std::map<std::string, std::string> metadata = {}) override;
// Returns all metrics collected by this logger and its `MetricsAccumulator`.
std::vector<Metric> GetCollectedMetrics() const override;
MetricsAccumulator* GetMetricsAccumulator() { return &metrics_accumulator_; }
private:
webrtc::Timestamp Now();
webrtc::Clock* const clock_;
MetricsAccumulator metrics_accumulator_;
mutable Mutex mutex_;
std::vector<Metric> metrics_ RTC_GUARDED_BY(mutex_);
};
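// Usage sketch (editor's illustration; the metric and test case names are
// made up): log a single value against the real-time clock and collect what
// was logged.
//
//   DefaultMetricsLogger logger(Clock::GetRealTimeClock());
//   logger.LogSingleValueMetric("psnr", "my_test", 42.0, Unit::kUnitless,
//                               ImprovementDirection::kBiggerIsBetter);
//   std::vector<Metric> metrics = logger.GetCollectedMetrics();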
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_METRICS_LOGGER_H_

View file

@ -0,0 +1,326 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/metrics_logger.h"
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "absl/types/optional.h"
#include "api/numerics/samples_stats_counter.h"
#include "api/test/metrics/metric.h"
#include "system_wrappers/include/clock.h"
#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace test {
namespace {
using ::testing::Eq;
using ::testing::IsEmpty;
using ::testing::SizeIs;
std::map<std::string, std::string> DefaultMetadata() {
return std::map<std::string, std::string>{{"key", "value"}};
}
TEST(DefaultMetricsLoggerTest, LogSingleValueMetricRecordsMetric) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
logger.LogSingleValueMetric(
"metric_name", "test_case_name",
/*value=*/10, Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
std::map<std::string, std::string>{{"key", "value"}});
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metric.metric_metadata,
Eq(std::map<std::string, std::string>{{"key", "value"}}));
ASSERT_THAT(metric.time_series.samples, SizeIs(1));
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metric.time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{}));
ASSERT_THAT(metric.stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.stddev, absl::nullopt);
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(10.0));
}
TEST(DefaultMetricsLoggerTest, LogMetricWithSamplesStatsCounterRecordsMetric) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
SamplesStatsCounter values;
values.AddSample(SamplesStatsCounter::StatsSample{
.value = 10,
.time = Clock::GetRealTimeClock()->CurrentTime(),
.metadata =
std::map<std::string, std::string>{{"point_key1", "value1"}}});
values.AddSample(SamplesStatsCounter::StatsSample{
.value = 20,
.time = Clock::GetRealTimeClock()->CurrentTime(),
.metadata =
std::map<std::string, std::string>{{"point_key2", "value2"}}});
logger.LogMetric("metric_name", "test_case_name", values, Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
std::map<std::string, std::string>{{"key", "value"}});
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metric.metric_metadata,
Eq(std::map<std::string, std::string>{{"key", "value"}}));
ASSERT_THAT(metric.time_series.samples, SizeIs(2));
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metric.time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"point_key1", "value1"}}));
EXPECT_THAT(metric.time_series.samples[1].value, Eq(20.0));
EXPECT_THAT(metric.time_series.samples[1].sample_metadata,
Eq(std::map<std::string, std::string>{{"point_key2", "value2"}}));
ASSERT_THAT(metric.stats.mean, absl::optional<double>(15.0));
ASSERT_THAT(metric.stats.stddev, absl::optional<double>(5.0));
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(20.0));
}
TEST(DefaultMetricsLoggerTest,
LogMetricWithEmptySamplesStatsCounterRecordsEmptyMetric) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
SamplesStatsCounter values;
logger.LogMetric("metric_name", "test_case_name", values, Unit::kUnitless,
ImprovementDirection::kBiggerIsBetter, DefaultMetadata());
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
EXPECT_THAT(metrics[0].name, Eq("metric_name"));
EXPECT_THAT(metrics[0].test_case, Eq("test_case_name"));
EXPECT_THAT(metrics[0].time_series.samples, IsEmpty());
ASSERT_THAT(metrics[0].stats.mean, Eq(absl::nullopt));
ASSERT_THAT(metrics[0].stats.stddev, Eq(absl::nullopt));
ASSERT_THAT(metrics[0].stats.min, Eq(absl::nullopt));
ASSERT_THAT(metrics[0].stats.max, Eq(absl::nullopt));
}
TEST(DefaultMetricsLoggerTest, LogMetricWithStatsRecordsMetric) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20};
logger.LogMetric("metric_name", "test_case_name", metric_stats,
Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
std::map<std::string, std::string>{{"key", "value"}});
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metric.metric_metadata,
Eq(std::map<std::string, std::string>{{"key", "value"}}));
ASSERT_THAT(metric.time_series.samples, IsEmpty());
ASSERT_THAT(metric.stats.mean, absl::optional<double>(15.0));
ASSERT_THAT(metric.stats.stddev, absl::optional<double>(5.0));
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(20.0));
}
TEST(DefaultMetricsLoggerTest, LogSingleValueMetricRecordsMultipleMetrics) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
logger.LogSingleValueMetric("metric_name1", "test_case_name1",
/*value=*/10, Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
logger.LogSingleValueMetric("metric_name2", "test_case_name2",
/*value=*/10, Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(2));
EXPECT_THAT(metrics[0].name, Eq("metric_name1"));
EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1"));
EXPECT_THAT(metrics[1].name, Eq("metric_name2"));
EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2"));
}
TEST(DefaultMetricsLoggerTest,
LogMetricWithSamplesStatsCounterRecordsMultipleMetrics) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
SamplesStatsCounter values;
values.AddSample(SamplesStatsCounter::StatsSample{
.value = 10,
.time = Clock::GetRealTimeClock()->CurrentTime(),
.metadata = DefaultMetadata()});
values.AddSample(SamplesStatsCounter::StatsSample{
.value = 20,
.time = Clock::GetRealTimeClock()->CurrentTime(),
.metadata = DefaultMetadata()});
logger.LogMetric("metric_name1", "test_case_name1", values,
Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
logger.LogMetric("metric_name2", "test_case_name2", values,
Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(2));
EXPECT_THAT(metrics[0].name, Eq("metric_name1"));
EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1"));
EXPECT_THAT(metrics[1].name, Eq("metric_name2"));
EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2"));
}
TEST(DefaultMetricsLoggerTest, LogMetricWithStatsRecordsMultipleMetrics) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20};
logger.LogMetric("metric_name1", "test_case_name1", metric_stats,
Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
logger.LogMetric("metric_name2", "test_case_name2", metric_stats,
Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(2));
EXPECT_THAT(metrics[0].name, Eq("metric_name1"));
EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1"));
EXPECT_THAT(metrics[1].name, Eq("metric_name2"));
EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2"));
}
TEST(DefaultMetricsLoggerTest,
     LogMetricThroughAllMethodsAccumulatesAllMetrics) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
SamplesStatsCounter values;
values.AddSample(SamplesStatsCounter::StatsSample{
.value = 10,
.time = Clock::GetRealTimeClock()->CurrentTime(),
.metadata = DefaultMetadata()});
values.AddSample(SamplesStatsCounter::StatsSample{
.value = 20,
.time = Clock::GetRealTimeClock()->CurrentTime(),
.metadata = DefaultMetadata()});
Metric::Stats metric_stats{.mean = 15, .stddev = 5, .min = 10, .max = 20};
logger.LogSingleValueMetric("metric_name1", "test_case_name1",
/*value=*/10, Unit::kMilliseconds,
ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
logger.LogMetric("metric_name2", "test_case_name2", values,
Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
logger.LogMetric("metric_name3", "test_case_name3", metric_stats,
Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
DefaultMetadata());
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics.size(), Eq(3lu));
EXPECT_THAT(metrics[0].name, Eq("metric_name1"));
EXPECT_THAT(metrics[0].test_case, Eq("test_case_name1"));
EXPECT_THAT(metrics[1].name, Eq("metric_name2"));
EXPECT_THAT(metrics[1].test_case, Eq("test_case_name2"));
EXPECT_THAT(metrics[2].name, Eq("metric_name3"));
EXPECT_THAT(metrics[2].test_case, Eq("test_case_name3"));
}
TEST(DefaultMetricsLoggerTest, AccumulatedMetricsReturnedInCollectedMetrics) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
logger.GetMetricsAccumulator()->AddSample(
"metric_name", "test_case_name",
/*value=*/10, Timestamp::Seconds(1),
/*point_metadata=*/std::map<std::string, std::string>{{"key", "value"}});
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(1));
const Metric& metric = metrics[0];
EXPECT_THAT(metric.name, Eq("metric_name"));
EXPECT_THAT(metric.test_case, Eq("test_case_name"));
EXPECT_THAT(metric.unit, Eq(Unit::kUnitless));
EXPECT_THAT(metric.improvement_direction,
Eq(ImprovementDirection::kNeitherIsBetter));
EXPECT_THAT(metric.metric_metadata, IsEmpty());
ASSERT_THAT(metric.time_series.samples, SizeIs(1));
EXPECT_THAT(metric.time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metric.time_series.samples[0].timestamp,
Eq(Timestamp::Seconds(1)));
EXPECT_THAT(metric.time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"key", "value"}}));
ASSERT_THAT(metric.stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.stddev, absl::optional<double>(0.0));
ASSERT_THAT(metric.stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metric.stats.max, absl::optional<double>(10.0));
}
TEST(DefaultMetricsLoggerTest,
AccumulatedMetricsReturnedTogetherWithLoggedMetrics) {
DefaultMetricsLogger logger(Clock::GetRealTimeClock());
logger.LogSingleValueMetric(
"metric_name1", "test_case_name1",
/*value=*/10, Unit::kMilliseconds, ImprovementDirection::kBiggerIsBetter,
std::map<std::string, std::string>{{"key_m", "value_m"}});
logger.GetMetricsAccumulator()->AddSample(
"metric_name2", "test_case_name2",
/*value=*/10, Timestamp::Seconds(1),
/*point_metadata=*/
std::map<std::string, std::string>{{"key_s", "value_s"}});
std::vector<Metric> metrics = logger.GetCollectedMetrics();
ASSERT_THAT(metrics, SizeIs(2));
EXPECT_THAT(metrics[0].name, Eq("metric_name2"));
EXPECT_THAT(metrics[0].test_case, Eq("test_case_name2"));
EXPECT_THAT(metrics[0].unit, Eq(Unit::kUnitless));
EXPECT_THAT(metrics[0].improvement_direction,
Eq(ImprovementDirection::kNeitherIsBetter));
EXPECT_THAT(metrics[0].metric_metadata, IsEmpty());
ASSERT_THAT(metrics[0].time_series.samples, SizeIs(1));
EXPECT_THAT(metrics[0].time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metrics[0].time_series.samples[0].timestamp,
Eq(Timestamp::Seconds(1)));
EXPECT_THAT(metrics[0].time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{{"key_s", "value_s"}}));
ASSERT_THAT(metrics[0].stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metrics[0].stats.stddev, absl::optional<double>(0.0));
ASSERT_THAT(metrics[0].stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metrics[0].stats.max, absl::optional<double>(10.0));
EXPECT_THAT(metrics[1].name, Eq("metric_name1"));
EXPECT_THAT(metrics[1].test_case, Eq("test_case_name1"));
EXPECT_THAT(metrics[1].unit, Eq(Unit::kMilliseconds));
EXPECT_THAT(metrics[1].improvement_direction,
Eq(ImprovementDirection::kBiggerIsBetter));
EXPECT_THAT(metrics[1].metric_metadata,
Eq(std::map<std::string, std::string>{{"key_m", "value_m"}}));
ASSERT_THAT(metrics[1].time_series.samples, SizeIs(1));
EXPECT_THAT(metrics[1].time_series.samples[0].value, Eq(10.0));
EXPECT_THAT(metrics[1].time_series.samples[0].sample_metadata,
Eq(std::map<std::string, std::string>{}));
ASSERT_THAT(metrics[1].stats.mean, absl::optional<double>(10.0));
ASSERT_THAT(metrics[1].stats.stddev, absl::nullopt);
ASSERT_THAT(metrics[1].stats.min, absl::optional<double>(10.0));
ASSERT_THAT(metrics[1].stats.max, absl::optional<double>(10.0));
}
} // namespace
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,166 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/metrics_set_proto_file_exporter.h"
#include <stdio.h>
#include <map>
#include <string>
#include <utility>
#include "api/test/metrics/metric.h"
#include "rtc_base/logging.h"
#include "test/testsupport/file_utils.h"
#if WEBRTC_ENABLE_PROTOBUF
#include "api/test/metrics/proto/metric.pb.h"
#endif
namespace webrtc {
namespace test {
namespace {
#if WEBRTC_ENABLE_PROTOBUF
webrtc::test_metrics::Unit ToProtoUnit(Unit unit) {
switch (unit) {
case Unit::kMilliseconds:
return webrtc::test_metrics::Unit::MILLISECONDS;
case Unit::kPercent:
return webrtc::test_metrics::Unit::PERCENT;
case Unit::kBytes:
return webrtc::test_metrics::Unit::BYTES;
case Unit::kKilobitsPerSecond:
return webrtc::test_metrics::Unit::KILOBITS_PER_SECOND;
case Unit::kHertz:
return webrtc::test_metrics::Unit::HERTZ;
case Unit::kUnitless:
return webrtc::test_metrics::Unit::UNITLESS;
case Unit::kCount:
return webrtc::test_metrics::Unit::COUNT;
}
}
webrtc::test_metrics::ImprovementDirection ToProtoImprovementDirection(
ImprovementDirection direction) {
switch (direction) {
case ImprovementDirection::kBiggerIsBetter:
return webrtc::test_metrics::ImprovementDirection::BIGGER_IS_BETTER;
case ImprovementDirection::kNeitherIsBetter:
return webrtc::test_metrics::ImprovementDirection::NEITHER_IS_BETTER;
case ImprovementDirection::kSmallerIsBetter:
return webrtc::test_metrics::ImprovementDirection::SMALLER_IS_BETTER;
}
}
void SetTimeSeries(
const Metric::TimeSeries& time_series,
webrtc::test_metrics::Metric::TimeSeries* proto_time_series) {
for (const Metric::TimeSeries::Sample& sample : time_series.samples) {
webrtc::test_metrics::Metric::TimeSeries::Sample* proto_sample =
proto_time_series->add_samples();
proto_sample->set_value(sample.value);
proto_sample->set_timestamp_us(sample.timestamp.us());
for (const auto& [key, value] : sample.sample_metadata) {
proto_sample->mutable_sample_metadata()->insert({key, value});
}
}
}
void SetStats(const Metric::Stats& stats,
webrtc::test_metrics::Metric::Stats* proto_stats) {
if (stats.mean.has_value()) {
proto_stats->set_mean(*stats.mean);
}
if (stats.stddev.has_value()) {
proto_stats->set_stddev(*stats.stddev);
}
if (stats.min.has_value()) {
proto_stats->set_min(*stats.min);
}
if (stats.max.has_value()) {
proto_stats->set_max(*stats.max);
}
}
bool WriteMetricsToFile(const std::string& path,
const webrtc::test_metrics::MetricsSet& metrics_set) {
std::string data;
bool ok = metrics_set.SerializeToString(&data);
if (!ok) {
RTC_LOG(LS_ERROR) << "Failed to serialize histogram set to string";
return false;
}
CreateDir(DirName(path));
FILE* output = fopen(path.c_str(), "wb");
  FILE* output = fopen(path.c_str(), "wb");
  if (output == nullptr) {
    RTC_LOG(LS_ERROR) << "Failed to open " << path << " for writing";
return false;
}
size_t written = fwrite(data.c_str(), sizeof(char), data.size(), output);
fclose(output);
if (written != data.size()) {
size_t expected = data.size();
RTC_LOG(LS_ERROR) << "Wrote " << written << ", tried to write " << expected;
return false;
}
return true;
}
#endif // WEBRTC_ENABLE_PROTOBUF
} // namespace
MetricsSetProtoFileExporter::Options::Options(
absl::string_view export_file_path)
: export_file_path(export_file_path) {}
MetricsSetProtoFileExporter::Options::Options(
absl::string_view export_file_path,
bool export_whole_time_series)
: export_file_path(export_file_path),
export_whole_time_series(export_whole_time_series) {}
MetricsSetProtoFileExporter::Options::Options(
absl::string_view export_file_path,
std::map<std::string, std::string> metadata)
: export_file_path(export_file_path), metadata(std::move(metadata)) {}
bool MetricsSetProtoFileExporter::Export(rtc::ArrayView<const Metric> metrics) {
#if WEBRTC_ENABLE_PROTOBUF
webrtc::test_metrics::MetricsSet metrics_set;
for (const auto& [key, value] : options_.metadata) {
metrics_set.mutable_metadata()->insert({key, value});
}
for (const Metric& metric : metrics) {
webrtc::test_metrics::Metric* metric_proto = metrics_set.add_metrics();
metric_proto->set_name(metric.name);
metric_proto->set_unit(ToProtoUnit(metric.unit));
metric_proto->set_improvement_direction(
ToProtoImprovementDirection(metric.improvement_direction));
metric_proto->set_test_case(metric.test_case);
for (const auto& [key, value] : metric.metric_metadata) {
metric_proto->mutable_metric_metadata()->insert({key, value});
}
if (options_.export_whole_time_series) {
SetTimeSeries(metric.time_series, metric_proto->mutable_time_series());
}
SetStats(metric.stats, metric_proto->mutable_stats());
}
return WriteMetricsToFile(options_.export_file_path, metrics_set);
#else
RTC_LOG(LS_ERROR)
<< "Compile with protobuf support to properly use this class";
return false;
#endif
}
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,59 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_METRICS_SET_PROTO_FILE_EXPORTER_H_
#define API_TEST_METRICS_METRICS_SET_PROTO_FILE_EXPORTER_H_
#include <map>
#include <string>
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_exporter.h"
namespace webrtc {
namespace test {
// Exports all collected metrics to a proto file in the
// `webrtc::test_metrics::MetricsSet` format.
class MetricsSetProtoFileExporter : public MetricsExporter {
public:
struct Options {
explicit Options(absl::string_view export_file_path);
Options(absl::string_view export_file_path, bool export_whole_time_series);
Options(absl::string_view export_file_path,
std::map<std::string, std::string> metadata);
    // Path of the file the proto will be exported to.
std::string export_file_path;
    // If true, writes all time series values to the output proto file;
    // otherwise writes only the stats.
bool export_whole_time_series = true;
    // Metadata associated with the whole MetricsSet.
std::map<std::string, std::string> metadata;
};
explicit MetricsSetProtoFileExporter(const Options& options)
: options_(options) {}
MetricsSetProtoFileExporter(const MetricsSetProtoFileExporter&) = delete;
MetricsSetProtoFileExporter& operator=(const MetricsSetProtoFileExporter&) =
delete;
bool Export(rtc::ArrayView<const Metric> metrics) override;
private:
const Options options_;
};
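// Usage sketch (editor's illustration; the output path is made up):
//
//   MetricsSetProtoFileExporter::Options options("/tmp/metrics_set.pb");
//   MetricsSetProtoFileExporter exporter(options);
//   bool ok = exporter.Export(metrics);  // `metrics` collected elsewhere,
//                                        // e.g. from a MetricsLogger.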
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_METRICS_SET_PROTO_FILE_EXPORTER_H_

View file

@ -0,0 +1,172 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/metrics_set_proto_file_exporter.h"
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include "api/test/metrics/metric.h"
#include "api/test/metrics/proto/metric.pb.h"
#include "api/units/timestamp.h"
#include "rtc_base/protobuf_utils.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "test/testsupport/file_utils.h"
namespace webrtc {
namespace test {
namespace {
using ::testing::Eq;
using ::testing::Test;
namespace proto = ::webrtc::test_metrics;
std::string ReadFileAsString(const std::string& filename) {
std::ifstream infile(filename, std::ios_base::binary);
auto buffer = std::vector<char>(std::istreambuf_iterator<char>(infile),
std::istreambuf_iterator<char>());
return std::string(buffer.begin(), buffer.end());
}
std::map<std::string, std::string> DefaultMetadata() {
return std::map<std::string, std::string>{{"key", "value"}};
}
Metric::TimeSeries::Sample Sample(double value) {
return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1),
.value = value,
.sample_metadata = DefaultMetadata()};
}
void AssertSamplesEqual(const proto::Metric::TimeSeries::Sample& actual_sample,
const Metric::TimeSeries::Sample& expected_sample) {
EXPECT_THAT(actual_sample.value(), Eq(expected_sample.value));
EXPECT_THAT(actual_sample.timestamp_us(), Eq(expected_sample.timestamp.us()));
EXPECT_THAT(actual_sample.sample_metadata().size(),
Eq(expected_sample.sample_metadata.size()));
for (const auto& [key, value] : expected_sample.sample_metadata) {
EXPECT_THAT(actual_sample.sample_metadata().at(key), Eq(value));
}
}
class MetricsSetProtoFileExporterTest : public Test {
protected:
~MetricsSetProtoFileExporterTest() override = default;
void SetUp() override {
temp_filename_ = webrtc::test::TempFilename(
webrtc::test::OutputPath(), "metrics_set_proto_file_exporter_test");
}
void TearDown() override {
ASSERT_TRUE(webrtc::test::RemoveFile(temp_filename_));
}
std::string temp_filename_;
};
TEST_F(MetricsSetProtoFileExporterTest, MetricsAreExportedCorrectly) {
MetricsSetProtoFileExporter::Options options(temp_filename_);
MetricsSetProtoFileExporter exporter(options);
Metric metric1{
.name = "test_metric1",
.unit = Unit::kMilliseconds,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name1",
.metric_metadata = DefaultMetadata(),
.time_series =
Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}},
.stats =
Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};
Metric metric2{
.name = "test_metric2",
.unit = Unit::kKilobitsPerSecond,
.improvement_direction = ImprovementDirection::kSmallerIsBetter,
.test_case = "test_case_name2",
.metric_metadata = DefaultMetadata(),
.time_series =
Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}},
.stats = Metric::Stats{
.mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}};
ASSERT_TRUE(exporter.Export(std::vector<Metric>{metric1, metric2}));
webrtc::test_metrics::MetricsSet actual_metrics_set;
actual_metrics_set.ParseFromString(ReadFileAsString(temp_filename_));
EXPECT_THAT(actual_metrics_set.metrics().size(), Eq(2));
EXPECT_THAT(actual_metrics_set.metrics(0).name(), Eq("test_metric1"));
EXPECT_THAT(actual_metrics_set.metrics(0).test_case(), Eq("test_case_name1"));
EXPECT_THAT(actual_metrics_set.metrics(0).unit(),
Eq(proto::Unit::MILLISECONDS));
EXPECT_THAT(actual_metrics_set.metrics(0).improvement_direction(),
Eq(proto::ImprovementDirection::BIGGER_IS_BETTER));
EXPECT_THAT(actual_metrics_set.metrics(0).metric_metadata().size(), Eq(1lu));
EXPECT_THAT(actual_metrics_set.metrics(0).metric_metadata().at("key"),
Eq("value"));
EXPECT_THAT(actual_metrics_set.metrics(0).time_series().samples().size(),
Eq(2));
AssertSamplesEqual(actual_metrics_set.metrics(0).time_series().samples(0),
Sample(10.0));
AssertSamplesEqual(actual_metrics_set.metrics(0).time_series().samples(1),
Sample(20.0));
EXPECT_THAT(actual_metrics_set.metrics(0).stats().mean(), Eq(15.0));
EXPECT_THAT(actual_metrics_set.metrics(0).stats().stddev(), Eq(5.0));
EXPECT_THAT(actual_metrics_set.metrics(0).stats().min(), Eq(10.0));
EXPECT_THAT(actual_metrics_set.metrics(0).stats().max(), Eq(20.0));
EXPECT_THAT(actual_metrics_set.metrics(1).name(), Eq("test_metric2"));
EXPECT_THAT(actual_metrics_set.metrics(1).test_case(), Eq("test_case_name2"));
EXPECT_THAT(actual_metrics_set.metrics(1).unit(),
Eq(proto::Unit::KILOBITS_PER_SECOND));
EXPECT_THAT(actual_metrics_set.metrics(1).improvement_direction(),
Eq(proto::ImprovementDirection::SMALLER_IS_BETTER));
EXPECT_THAT(actual_metrics_set.metrics(1).metric_metadata().size(), Eq(1lu));
EXPECT_THAT(actual_metrics_set.metrics(1).metric_metadata().at("key"),
Eq("value"));
EXPECT_THAT(actual_metrics_set.metrics(1).time_series().samples().size(),
Eq(2));
AssertSamplesEqual(actual_metrics_set.metrics(1).time_series().samples(0),
Sample(20.0));
AssertSamplesEqual(actual_metrics_set.metrics(1).time_series().samples(1),
Sample(40.0));
EXPECT_THAT(actual_metrics_set.metrics(1).stats().mean(), Eq(30.0));
EXPECT_THAT(actual_metrics_set.metrics(1).stats().stddev(), Eq(10.0));
EXPECT_THAT(actual_metrics_set.metrics(1).stats().min(), Eq(20.0));
EXPECT_THAT(actual_metrics_set.metrics(1).stats().max(), Eq(40.0));
}
TEST_F(MetricsSetProtoFileExporterTest, NoMetricsSetMetadata) {
MetricsSetProtoFileExporter::Options options(temp_filename_);
MetricsSetProtoFileExporter exporter(options);
ASSERT_TRUE(exporter.Export(std::vector<Metric>{}));
webrtc::test_metrics::MetricsSet actual_metrics_set;
actual_metrics_set.ParseFromString(ReadFileAsString(temp_filename_));
EXPECT_EQ(actual_metrics_set.metadata_size(), 0);
}
TEST_F(MetricsSetProtoFileExporterTest, MetricsSetMetadata) {
MetricsSetProtoFileExporter::Options options(
temp_filename_, {{"a_metadata_key", "a_metadata_value"}});
MetricsSetProtoFileExporter exporter(options);
ASSERT_TRUE(exporter.Export(std::vector<Metric>{}));
webrtc::test_metrics::MetricsSet actual_metrics_set;
actual_metrics_set.ParseFromString(ReadFileAsString(temp_filename_));
EXPECT_EQ(actual_metrics_set.metadata_size(), 1);
EXPECT_EQ(actual_metrics_set.metadata().at("a_metadata_key"),
"a_metadata_value");
}
} // namespace
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,157 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/print_result_proxy_metrics_exporter.h"
#include <string>
#include <unordered_set>
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "test/testsupport/perf_test.h"
namespace webrtc {
namespace test {
namespace {
std::string ToPrintResultUnit(Unit unit) {
switch (unit) {
case Unit::kMilliseconds:
return "msBestFitFormat";
case Unit::kPercent:
return "n%";
case Unit::kBytes:
return "sizeInBytes";
case Unit::kKilobitsPerSecond:
      // PrintResult prefers Chrome Perf Dashboard units, which don't include
      // kbps, so we convert the unit and value accordingly.
return "bytesPerSecond";
case Unit::kHertz:
return "Hz";
case Unit::kUnitless:
return "unitless";
case Unit::kCount:
return "count";
}
}
double ToPrintResultValue(double value, Unit unit) {
switch (unit) {
case Unit::kKilobitsPerSecond:
      // PrintResult prefers Chrome Perf Dashboard units, which don't include
      // kbps, so we convert the unit and value accordingly (e.g. 1000 kbps
      // becomes 125000 bytes per second).
return value * 1000 / 8;
default:
return value;
}
}
ImproveDirection ToPrintResultImproveDirection(ImprovementDirection direction) {
switch (direction) {
case ImprovementDirection::kBiggerIsBetter:
return ImproveDirection::kBiggerIsBetter;
case ImprovementDirection::kNeitherIsBetter:
return ImproveDirection::kNone;
case ImprovementDirection::kSmallerIsBetter:
return ImproveDirection::kSmallerIsBetter;
}
}
bool IsEmpty(const Metric::Stats& stats) {
return !stats.mean.has_value() && !stats.stddev.has_value() &&
!stats.min.has_value() && !stats.max.has_value();
}
bool NameEndsWithConnected(const std::string& name) {
static const std::string suffix = "_connected";
return name.size() >= suffix.size() &&
0 == name.compare(name.size() - suffix.size(), suffix.size(), suffix);
}
} // namespace
bool PrintResultProxyMetricsExporter::Export(
rtc::ArrayView<const Metric> metrics) {
static const std::unordered_set<std::string> per_call_metrics{
"actual_encode_bitrate",
"encode_frame_rate",
"harmonic_framerate",
"max_skipped",
"min_psnr_dB",
"retransmission_bitrate",
"sent_packets_loss",
"transmission_bitrate",
"dropped_frames",
"frames_in_flight",
"rendered_frames",
"average_receive_rate",
"average_send_rate",
"bytes_discarded_no_receiver",
"bytes_received",
"bytes_sent",
"packets_discarded_no_receiver",
"packets_received",
"packets_sent",
"payload_bytes_received",
"payload_bytes_sent",
"cpu_usage"};
for (const Metric& metric : metrics) {
if (metric.time_series.samples.empty() && IsEmpty(metric.stats)) {
      // If no data was collected for the metric, a 0 is expected to be
      // exported, so export a single 0 value.
PrintResult(metric.name, /*modifier=*/"", metric.test_case,
ToPrintResultValue(0, metric.unit),
ToPrintResultUnit(metric.unit), /*important=*/false,
ToPrintResultImproveDirection(metric.improvement_direction));
continue;
}
if (metric.time_series.samples.empty()) {
PrintResultMeanAndError(
metric.name, /*modifier=*/"", metric.test_case,
ToPrintResultValue(*metric.stats.mean, metric.unit),
ToPrintResultValue(*metric.stats.stddev, metric.unit),
ToPrintResultUnit(metric.unit),
/*important=*/false,
ToPrintResultImproveDirection(metric.improvement_direction));
continue;
}
if (metric.time_series.samples.size() == 1lu &&
(per_call_metrics.count(metric.name) > 0 ||
NameEndsWithConnected(metric.name))) {
      // Preserve backwards compatibility for the single-value use case.
PrintResult(
metric.name, /*modifier=*/"", metric.test_case,
ToPrintResultValue(metric.time_series.samples[0].value, metric.unit),
ToPrintResultUnit(metric.unit), /*important=*/false,
ToPrintResultImproveDirection(metric.improvement_direction));
continue;
}
SamplesStatsCounter counter;
for (size_t i = 0; i < metric.time_series.samples.size(); ++i) {
counter.AddSample(SamplesStatsCounter::StatsSample{
.value = ToPrintResultValue(metric.time_series.samples[i].value,
metric.unit),
.time = metric.time_series.samples[i].timestamp,
.metadata = metric.time_series.samples[i].sample_metadata});
}
PrintResult(metric.name, /*modifier=*/"", metric.test_case, counter,
ToPrintResultUnit(metric.unit),
/*important=*/false,
ToPrintResultImproveDirection(metric.improvement_direction));
}
return true;
}
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,32 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_PRINT_RESULT_PROXY_METRICS_EXPORTER_H_
#define API_TEST_METRICS_PRINT_RESULT_PROXY_METRICS_EXPORTER_H_
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_exporter.h"
namespace webrtc {
namespace test {
// Proxies all exported metrics to the `webrtc::test::PrintResult` API.
class PrintResultProxyMetricsExporter : public MetricsExporter {
public:
~PrintResultProxyMetricsExporter() override = default;
bool Export(rtc::ArrayView<const Metric> metrics) override;
};
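// Usage sketch (editor's illustration): export previously collected metrics
// through the PrintResult machinery, which writes "RESULT ..." lines to
// stdout.
//
//   PrintResultProxyMetricsExporter exporter;
//   exporter.Export(metrics);  // `metrics` from
//                              // MetricsLogger::GetCollectedMetrics().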
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_PRINT_RESULT_PROXY_METRICS_EXPORTER_H_

View file

@ -0,0 +1,177 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/print_result_proxy_metrics_exporter.h"
#include <map>
#include <string>
#include <vector>
#include "api/test/metrics/metric.h"
#include "api/units/timestamp.h"
#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace test {
namespace {
using ::testing::TestWithParam;
std::map<std::string, std::string> DefaultMetadata() {
return std::map<std::string, std::string>{{"key", "value"}};
}
Metric::TimeSeries::Sample Sample(double value) {
return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1),
.value = value,
.sample_metadata = DefaultMetadata()};
}
TEST(PrintResultProxyMetricsExporterTest,
ExportMetricsWithTimeSeriesFormatCorrect) {
Metric metric1{
.name = "test_metric1",
.unit = Unit::kMilliseconds,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name1",
.metric_metadata = DefaultMetadata(),
.time_series =
Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}},
.stats =
Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};
Metric metric2{
.name = "test_metric2",
.unit = Unit::kKilobitsPerSecond,
.improvement_direction = ImprovementDirection::kSmallerIsBetter,
.test_case = "test_case_name2",
.metric_metadata = DefaultMetadata(),
.time_series =
Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}},
.stats = Metric::Stats{
.mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}};
testing::internal::CaptureStdout();
PrintResultProxyMetricsExporter exporter;
std::string expected =
"RESULT test_metric1: test_case_name1= {15,5} "
"msBestFitFormat_biggerIsBetter\n"
"RESULT test_metric2: test_case_name2= {3750,1250} "
"bytesPerSecond_smallerIsBetter\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric1, metric2}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(PrintResultProxyMetricsExporterTest,
ExportMetricsTimeSeriesOfSingleValueBackwardCompatibleFormat) {
// This should be printed as {mean, stddev} despite only being a single data
// point.
Metric metric1{
.name = "available_send_bandwidth",
.unit = Unit::kKilobitsPerSecond,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case/alice",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = std::vector{Sample(1000)}},
.stats = Metric::Stats{
.mean = 1000.0, .stddev = 0.0, .min = 1000.0, .max = 1000.0}};
// This is a per-call metric that shouldn't have a stddev estimate.
Metric metric2{
.name = "min_psnr_dB",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case/alice-video",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = std::vector{Sample(10)}},
.stats =
Metric::Stats{.mean = 10.0, .stddev = 0.0, .min = 10.0, .max = 10.0}};
  // Metrics whose name ends with `_connected` are also treated as per-call
  // metrics and shouldn't have a stddev estimate.
Metric metric3{
.name = "alice_connected",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = std::vector{Sample(1)}},
.stats =
Metric::Stats{.mean = 1.0, .stddev = 0.0, .min = 1.0, .max = 1.0}};
testing::internal::CaptureStdout();
PrintResultProxyMetricsExporter exporter;
std::string expected =
"RESULT available_send_bandwidth: test_case/alice= {125000,0} "
"bytesPerSecond_biggerIsBetter\n"
"RESULT min_psnr_dB: test_case/alice-video= 10 "
"unitless_biggerIsBetter\n"
"RESULT alice_connected: test_case= 1 "
"unitless_biggerIsBetter\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric1, metric2, metric3}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(PrintResultProxyMetricsExporterTest,
ExportMetricsWithStatsOnlyFormatCorrect) {
Metric metric1{.name = "test_metric1",
.unit = Unit::kMilliseconds,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name1",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = {}},
.stats = Metric::Stats{
.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};
Metric metric2{
.name = "test_metric2",
.unit = Unit::kKilobitsPerSecond,
.improvement_direction = ImprovementDirection::kSmallerIsBetter,
.test_case = "test_case_name2",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = {}},
.stats = Metric::Stats{
.mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}};
testing::internal::CaptureStdout();
PrintResultProxyMetricsExporter exporter;
std::string expected =
"RESULT test_metric1: test_case_name1= {15,5} "
"msBestFitFormat_biggerIsBetter\n"
"RESULT test_metric2: test_case_name2= {3750,1250} "
"bytesPerSecond_smallerIsBetter\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric1, metric2}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(PrintResultProxyMetricsExporterTest, ExportEmptyMetricOnlyFormatCorrect) {
Metric metric{.name = "test_metric",
.unit = Unit::kMilliseconds,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name",
.metric_metadata = DefaultMetadata(),
.time_series = Metric::TimeSeries{.samples = {}},
.stats = Metric::Stats{}};
testing::internal::CaptureStdout();
PrintResultProxyMetricsExporter exporter;
std::string expected =
"RESULT test_metric: test_case_name= 0 "
"msBestFitFormat_biggerIsBetter\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
} // namespace
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,89 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
syntax = "proto3";
package webrtc.test_metrics;
// Root message of the proto file. Contains the collection of all the metrics.
message MetricsSet {
repeated Metric metrics = 1;
// Metadata associated with the whole metrics set.
map<string, string> metadata = 2;
}
enum Unit {
  // Default zero value required by proto3; not a meaningful unit.
UNDEFINED_UNIT = 0;
  // General unitless value. Can be used either for dimensionless quantities
  // (e.g. a ratio) or for units that are not present in this enum and are too
  // specific to add.
UNITLESS = 1;
MILLISECONDS = 2;
PERCENT = 3;
BYTES = 4;
KILOBITS_PER_SECOND = 5;
HERTZ = 6;
COUNT = 7;
}
enum ImprovementDirection {
  // Default zero value required by proto3; not a meaningful direction.
UNDEFINED_IMPROVEMENT_DIRECTION = 0;
BIGGER_IS_BETTER = 1;
NEITHER_IS_BETTER = 2;
SMALLER_IS_BETTER = 3;
}
// Single performance metric with all related metadata.
message Metric {
// Metric name, for example PSNR, SSIM, decode_time, etc.
string name = 1;
Unit unit = 2;
ImprovementDirection improvement_direction = 3;
  // If the metric was generated by a test, this field can be used to record
  // which test case produced it.
string test_case = 4;
// Metadata associated with the whole metric.
map<string, string> metric_metadata = 5;
message TimeSeries {
message Sample {
// Timestamp in microseconds associated with a sample. For example,
// the timestamp when the sample was collected.
int64 timestamp_us = 1;
double value = 2;
// Metadata associated with this particular sample.
map<string, string> sample_metadata = 3;
}
// All samples collected for this metric. It can be empty if the Metric
// object only contains `stats`.
repeated Sample samples = 1;
}
// Contains all samples of the metric collected during test execution.
// It can be empty if the user only stores precomputed statistics into
// `stats`.
TimeSeries time_series = 6;
  // The metric's precomputed statistics, based on `time_series`, or, if
  // `time_series` is omitted (has 0 samples), precomputed statistics provided
  // by the metric's calculator.
message Stats {
// Sample mean of the metric
// (https://en.wikipedia.org/wiki/Sample_mean_and_covariance).
optional double mean = 1;
// Standard deviation (https://en.wikipedia.org/wiki/Standard_deviation).
// Is undefined if `time_series` contains only a single sample.
optional double stddev = 2;
optional double min = 3;
optional double max = 4;
}
Stats stats = 7;
}
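// Example MetricsSet in proto text format (editor's sketch; the values are
// made up, the field names are as defined above):
//
//   metrics {
//     name: "psnr"
//     unit: UNITLESS
//     improvement_direction: BIGGER_IS_BETTER
//     test_case: "foo"
//     time_series { samples { timestamp_us: 1000000 value: 15.0 } }
//     stats { mean: 15.0 min: 15.0 max: 15.0 }
//   }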

View file

@ -0,0 +1,101 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/stdout_metrics_exporter.h"
#include <stdio.h>
#include <cmath>
#include <string>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
namespace test {
namespace {
// Returns the integral part of the absolute value of the number.
int64_t IntegralPart(double value) {
return std::lround(std::floor(std::abs(value)));
}
void AppendWithPrecision(double value,
int digits_after_comma,
rtc::StringBuilder& out) {
int64_t multiplier = std::lround(std::pow(10, digits_after_comma));
int64_t integral_part = IntegralPart(value);
double decimal_part = std::abs(value) - integral_part;
  // If the decimal part has leading zeros, they would be lost when it is
  // multiplied by `multiplier`. To preserve them we add 1 in front, so the
  // leading digit is greater than 0 and nothing is dropped.
  //
  // During conversion to a string this added leading digit has to be
  // stripped.
  //
  // Also, due to rounding, the added leading digit may be incremented: with
  // `digits_after_comma` = 3 the number 1.9995 is rounded to 2. In that case
  // the increment has to be propagated to `integral_part`.
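  // Worked example (editor's illustration): value = 1.05,
  // digits_after_comma = 2 -> multiplier = 100, integral_part = 1,
  // decimal_part = 0.05, decimal_holder = lround(1.05 * 100) = 105; stripping
  // the added leading "1" below yields ".05", so "1.05" is printed.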
int64_t decimal_holder = std::lround((1 + decimal_part) * multiplier);
if (decimal_holder >= 2 * multiplier) {
    // Rounding incremented the added leading digit, so transfer 1 to the
    // integral part.
integral_part++;
decimal_holder -= multiplier;
}
// Remove trailing zeros.
while (decimal_holder % 10 == 0) {
decimal_holder /= 10;
}
// Print serialized number to output.
if (value < 0) {
out << "-";
}
out << integral_part;
if (decimal_holder != 1) {
out << "." << std::to_string(decimal_holder).substr(1, digits_after_comma);
}
}
} // namespace
StdoutMetricsExporter::StdoutMetricsExporter() : output_(stdout) {}
bool StdoutMetricsExporter::Export(rtc::ArrayView<const Metric> metrics) {
for (const Metric& metric : metrics) {
PrintMetric(metric);
}
return true;
}
void StdoutMetricsExporter::PrintMetric(const Metric& metric) {
rtc::StringBuilder value_stream;
value_stream << metric.test_case << " / " << metric.name << "= {mean=";
if (metric.stats.mean.has_value()) {
AppendWithPrecision(*metric.stats.mean, 8, value_stream);
} else {
value_stream << "-";
}
value_stream << ", stddev=";
if (metric.stats.stddev.has_value()) {
AppendWithPrecision(*metric.stats.stddev, 8, value_stream);
} else {
value_stream << "-";
}
value_stream << "} " << ToString(metric.unit) << " ("
<< ToString(metric.improvement_direction) << ")";
fprintf(output_, "RESULT: %s\n", value_stream.str().c_str());
}
} // namespace test
} // namespace webrtc

View file

@ -0,0 +1,41 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_METRICS_STDOUT_METRICS_EXPORTER_H_
#define API_TEST_METRICS_STDOUT_METRICS_EXPORTER_H_
#include "api/array_view.h"
#include "api/test/metrics/metric.h"
#include "api/test/metrics/metrics_exporter.h"
namespace webrtc {
namespace test {
// Exports all collected metrics to stdout.
class StdoutMetricsExporter : public MetricsExporter {
public:
StdoutMetricsExporter();
~StdoutMetricsExporter() override = default;
StdoutMetricsExporter(const StdoutMetricsExporter&) = delete;
StdoutMetricsExporter& operator=(const StdoutMetricsExporter&) = delete;
bool Export(rtc::ArrayView<const Metric> metrics) override;
private:
void PrintMetric(const Metric& metric);
FILE* const output_;
};
} // namespace test
} // namespace webrtc
#endif // API_TEST_METRICS_STDOUT_METRICS_EXPORTER_H_

View file

@ -0,0 +1,211 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/metrics/stdout_metrics_exporter.h"
#include <map>
#include <string>
#include <vector>
#include "api/test/metrics/metric.h"
#include "api/units/timestamp.h"
#include "test/gmock.h"
#include "test/gtest.h"
namespace webrtc {
namespace test {
namespace {
using ::testing::TestWithParam;
std::map<std::string, std::string> DefaultMetadata() {
return std::map<std::string, std::string>{{"key", "value"}};
}
Metric::TimeSeries::Sample Sample(double value) {
return Metric::TimeSeries::Sample{.timestamp = Timestamp::Seconds(1),
.value = value,
.sample_metadata = DefaultMetadata()};
}
Metric PsnrForTestFoo(double mean, double stddev) {
return Metric{.name = "psnr",
.unit = Unit::kUnitless,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "foo",
.time_series = Metric::TimeSeries{},
.stats = Metric::Stats{.mean = mean, .stddev = stddev}};
}
TEST(StdoutMetricsExporterTest, ExportMetricFormatCorrect) {
Metric metric1{
.name = "test_metric1",
.unit = Unit::kMilliseconds,
.improvement_direction = ImprovementDirection::kBiggerIsBetter,
.test_case = "test_case_name1",
.metric_metadata = DefaultMetadata(),
.time_series =
Metric::TimeSeries{.samples = std::vector{Sample(10), Sample(20)}},
.stats =
Metric::Stats{.mean = 15.0, .stddev = 5.0, .min = 10.0, .max = 20.0}};
Metric metric2{
.name = "test_metric2",
.unit = Unit::kKilobitsPerSecond,
.improvement_direction = ImprovementDirection::kSmallerIsBetter,
.test_case = "test_case_name2",
.metric_metadata = DefaultMetadata(),
.time_series =
Metric::TimeSeries{.samples = std::vector{Sample(20), Sample(40)}},
.stats = Metric::Stats{
.mean = 30.0, .stddev = 10.0, .min = 20.0, .max = 40.0}};
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
std::string expected =
"RESULT: test_case_name1 / test_metric1= "
"{mean=15, stddev=5} Milliseconds (BiggerIsBetter)\n"
"RESULT: test_case_name2 / test_metric2= "
"{mean=30, stddev=10} KilobitsPerSecond (SmallerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric1, metric2}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest, PositiveNumberMaxPrecision) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(15.00000001, 0.00000001);
std::string expected =
"RESULT: foo / psnr= "
"{mean=15.00000001, stddev=0.00000001} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest,
PositiveNumberTrailingZeroNotAdded) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(15.12345, 0.12);
std::string expected =
"RESULT: foo / psnr= "
"{mean=15.12345, stddev=0.12} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest,
PositiveNumberTrailingZeroAreRemoved) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(15.123450000, 0.120000000);
std::string expected =
"RESULT: foo / psnr= "
"{mean=15.12345, stddev=0.12} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest,
PositiveNumberRoundsUpOnPrecisionCorrectly) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(15.000000009, 0.999999999);
std::string expected =
"RESULT: foo / psnr= "
"{mean=15.00000001, stddev=1} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest,
PositiveNumberRoundsDownOnPrecisionCorrectly) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(15.0000000049, 0.9999999949);
std::string expected =
"RESULT: foo / psnr= "
"{mean=15, stddev=0.99999999} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest, NegativeNumberMaxPrecision) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(-15.00000001, -0.00000001);
std::string expected =
"RESULT: foo / psnr= "
"{mean=-15.00000001, stddev=-0.00000001} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest,
NegativeNumberTrailingZeroNotAdded) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(-15.12345, -0.12);
std::string expected =
"RESULT: foo / psnr= "
"{mean=-15.12345, stddev=-0.12} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest,
NegativeNumberTrailingZeroAreRemoved) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(-15.123450000, -0.120000000);
std::string expected =
"RESULT: foo / psnr= "
"{mean=-15.12345, stddev=-0.12} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest,
NegativeNumberRoundsUpOnPrecisionCorrectly) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(-15.000000009, -0.999999999);
std::string expected =
"RESULT: foo / psnr= "
"{mean=-15.00000001, stddev=-1} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
TEST(StdoutMetricsExporterNumberFormatTest,
NegativeNumberRoundsDownOnPrecisionCorrectly) {
testing::internal::CaptureStdout();
StdoutMetricsExporter exporter;
Metric metric = PsnrForTestFoo(-15.0000000049, -0.9999999949);
std::string expected =
"RESULT: foo / psnr= "
"{mean=-15, stddev=-0.99999999} Unitless (BiggerIsBetter)\n";
EXPECT_TRUE(exporter.Export(std::vector<Metric>{metric}));
EXPECT_EQ(expected, testing::internal::GetCapturedStdout());
}
} // namespace
} // namespace test
} // namespace webrtc