Bind benchmark builder to Python (#1040)
* Fix setup.py and reformat

* Bind benchmark

* Add benchmark option to Python

* Add Python examples for range, complexity, and thread

* Remove invalid multithreading in Python

* Bump Python bindings version to 0.2.0

Co-authored-by: Dominic Hamon <dominichamon@users.noreply.github.com>
AntoinePrv and dominichamon authored Sep 11, 2020
1 parent df9e294 commit 73d4d5e
Showing 4 changed files with 270 additions and 59 deletions.
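For orientation, here is a short usage sketch of the decorator-based options API this commit introduces, adapted from the example.py changes further down. The import alias and the particular options chosen are illustrative assumptions, not canonical documentation.

import google_benchmark as benchmark


@benchmark.register
@benchmark.option.unit(benchmark.kMicrosecond)
@benchmark.option.arg(1000)
def sum_a_range(state):
    while state:
        sum(range(state.range(0)))


if __name__ == "__main__":
    benchmark.main()

Running the module then executes the registered benchmark with the microsecond time unit and the argument 1000 applied through the builder bound in benchmark.cc.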
101 changes: 94 additions & 7 deletions bindings/python/google_benchmark/__init__.py
@@ -29,24 +29,111 @@ def my_benchmark(state):

from absl import app
from google_benchmark import _benchmark
from google_benchmark._benchmark import (
    Counter,
    kNanosecond,
    kMicrosecond,
    kMillisecond,
    oNone,
    o1,
    oN,
    oNSquared,
    oNCubed,
    oLogN,
    oNLogN,
    oAuto,
    oLambda,
)


__all__ = [
    "register",
    "main",
    "Counter",
    "kNanosecond",
    "kMicrosecond",
    "kMillisecond",
    "oNone",
    "o1",
    "oN",
    "oNSquared",
    "oNCubed",
    "oLogN",
    "oNLogN",
    "oAuto",
    "oLambda",
]

__version__ = "0.2.0"


class __OptionMaker:
    """A stateless class to collect benchmark options.

    Collect all decorator calls like @option.range(start=0, limit=1<<5).
    """

    class Options:
        """Pure data class to store options calls, along with the benchmarked function."""

        def __init__(self, func):
            self.func = func
            self.builder_calls = []

    @classmethod
    def make(cls, func_or_options):
        """Make Options from Options or the benchmarked function."""
        if isinstance(func_or_options, cls.Options):
            return func_or_options
        return cls.Options(func_or_options)

    def __getattr__(self, builder_name):
        """Append an option call to the Options."""

        # The function that gets returned on @option.range(start=0, limit=1<<5).
        def __builder_method(*args, **kwargs):

            # The decorator that gets called, either with the benchmarked function
            # or the previous Options.
            def __decorator(func_or_options):
                options = self.make(func_or_options)
                options.builder_calls.append((builder_name, args, kwargs))
                # The decorator returns Options so it is not technically a decorator
                # and needs a final call to @register.
                return options

            return __decorator

        return __builder_method


# Alias for a nicer API.
# We have to instantiate an object, even if stateless, to be able to use __getattr__
# on option.range.
option = __OptionMaker()


def register(undefined=None, *, name=None):
    """Register a function for benchmarking."""
    if undefined is None:
        # Decorator is called without parentheses so we return a decorator.
        return lambda f: register(f, name=name)

    # We have either the function to benchmark (simple case) or an instance of Options
    # (@option._ case).
    options = __OptionMaker.make(undefined)

    if name is None:
        name = options.func.__name__

    # We register the benchmark and replay all the @option._ calls onto the
    # benchmark builder pattern.
    benchmark = _benchmark.RegisterBenchmark(name, options.func)
    for name, args, kwargs in options.builder_calls[::-1]:
        getattr(benchmark, name)(*args, **kwargs)

    # Return the benchmarked function because the decorator does not modify it.
    return options.func


def _flags_parser(argv):
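Taken together, the changes above mean that each @option._ decorator only records a (builder_name, args, kwargs) tuple on an Options instance, and @benchmark.register replays those calls on the Benchmark object returned by _benchmark.RegisterBenchmark. A rough sketch of that equivalence, assuming the same imports as this file; the function names and bodies are illustrative.

import google_benchmark as benchmark
from google_benchmark import _benchmark


# Decorator form: the option calls are collected, then replayed by @register.
@benchmark.register
@benchmark.option.unit(benchmark.kMicrosecond)
@benchmark.option.arg(100)
def decorated(state):
    while state:
        sum(range(state.range(0)))


# Roughly equivalent low-level form, calling the bound builder directly.
def plain(state):
    while state:
        sum(range(state.range(0)))


plain_benchmark = _benchmark.RegisterBenchmark("plain", plain)
plain_benchmark.unit(benchmark.kMicrosecond)
plain_benchmark.arg(100)

The builder_calls list is replayed in reverse because decorators apply bottom-up, so the options end up applied in the order they appear in the source, top to bottom.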
90 changes: 81 additions & 9 deletions bindings/python/google_benchmark/benchmark.cc
@@ -1,7 +1,5 @@
// Benchmark for Python.

#include <map>
#include <string>
#include <vector>
@@ -11,6 +9,8 @@
#include "pybind11/stl.h"
#include "pybind11/stl_bind.h"

#include "benchmark/benchmark.h"

PYBIND11_MAKE_OPAQUE(benchmark::UserCounters);

namespace {
@@ -37,16 +37,82 @@ std::vector<std::string> Initialize(const std::vector<std::string>& argv) {
return remaining_argv;
}

benchmark::internal::Benchmark* RegisterBenchmark(const char* name,
                                                  py::function f) {
  return benchmark::RegisterBenchmark(
      name, [f](benchmark::State& state) { f(&state); });
}

PYBIND11_MODULE(_benchmark, m) {
using benchmark::TimeUnit;
py::enum_<TimeUnit>(m, "TimeUnit")
.value("kNanosecond", TimeUnit::kNanosecond)
.value("kMicrosecond", TimeUnit::kMicrosecond)
.value("kMillisecond", TimeUnit::kMillisecond)
.export_values();

using benchmark::BigO;
py::enum_<BigO>(m, "BigO")
.value("oNone", BigO::oNone)
.value("o1", BigO::o1)
.value("oN", BigO::oN)
.value("oNSquared", BigO::oNSquared)
.value("oNCubed", BigO::oNCubed)
.value("oLogN", BigO::oLogN)
      .value("oNLogN", BigO::oNLogN)
.value("oAuto", BigO::oAuto)
.value("oLambda", BigO::oLambda)
.export_values();

using benchmark::internal::Benchmark;
py::class_<Benchmark>(m, "Benchmark")
      // For methods returning a pointer to the current object, reference
      // return policy is used to ask pybind not to take ownership of the
      // returned object and avoid calling delete on it.
      // https://pybind11.readthedocs.io/en/stable/advanced/functions.html#return-value-policies
      //
      // For methods taking a const std::vector<...>&, a copy is created
      // because it is bound to a Python list.
      // https://pybind11.readthedocs.io/en/stable/advanced/cast/stl.html
.def("unit", &Benchmark::Unit, py::return_value_policy::reference)
.def("arg", &Benchmark::Arg, py::return_value_policy::reference)
.def("args", &Benchmark::Args, py::return_value_policy::reference)
.def("range", &Benchmark::Range, py::return_value_policy::reference,
py::arg("start"), py::arg("limit"))
.def("dense_range", &Benchmark::DenseRange,
py::return_value_policy::reference, py::arg("start"),
py::arg("limit"), py::arg("step") = 1)
.def("ranges", &Benchmark::Ranges, py::return_value_policy::reference)
.def("args_product", &Benchmark::ArgsProduct,
py::return_value_policy::reference)
.def("arg_name", &Benchmark::ArgName, py::return_value_policy::reference)
.def("arg_names", &Benchmark::ArgNames,
py::return_value_policy::reference)
.def("range_pair", &Benchmark::RangePair,
py::return_value_policy::reference, py::arg("lo1"), py::arg("hi1"),
py::arg("lo2"), py::arg("hi2"))
.def("range_multiplier", &Benchmark::RangeMultiplier,
py::return_value_policy::reference)
.def("min_time", &Benchmark::MinTime, py::return_value_policy::reference)
.def("iterations", &Benchmark::Iterations,
py::return_value_policy::reference)
.def("repetitions", &Benchmark::Repetitions,
py::return_value_policy::reference)
.def("report_aggregates_only", &Benchmark::ReportAggregatesOnly,
py::return_value_policy::reference, py::arg("value") = true)
.def("display_aggregates_only", &Benchmark::DisplayAggregatesOnly,
py::return_value_policy::reference, py::arg("value") = true)
.def("measure_process_cpu_time", &Benchmark::MeasureProcessCPUTime,
py::return_value_policy::reference)
.def("use_real_time", &Benchmark::UseRealTime,
py::return_value_policy::reference)
.def("use_manual_time", &Benchmark::UseManualTime,
py::return_value_policy::reference)
.def(
"complexity",
(Benchmark * (Benchmark::*)(benchmark::BigO)) & Benchmark::Complexity,
py::return_value_policy::reference,
py::arg("complexity") = benchmark::oAuto);

using benchmark::Counter;
py::class_<Counter> py_counter(m, "Counter");
@@ -104,5 +170,11 @@ PYBIND11_MODULE(_benchmark, m) {
.def_readwrite("counters", &State::counters)
.def_readonly("thread_index", &State::thread_index)
.def_readonly("threads", &State::threads);

m.def("Initialize", Initialize);
m.def("RegisterBenchmark", RegisterBenchmark,
py::return_value_policy::reference);
m.def("RunSpecifiedBenchmarks",
[]() { benchmark::RunSpecifiedBenchmarks(); });
};
} // namespace
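Because every builder method above is bound with py::return_value_policy::reference and returns the same underlying Benchmark*, the low-level object can also be driven as a fluent chain from Python. A small illustration using only methods bound in this file; the benchmark body is an assumption.

from google_benchmark import _benchmark


def sum_million(state):
    while state:
        sum(range(1_000_000))


# Each call returns the same Benchmark object, so the builder calls chain.
(
    _benchmark.RegisterBenchmark("sum_million", sum_million)
    .range_multiplier(2)
    .range(start=1 << 10, limit=1 << 18)
    .unit(_benchmark.kMicrosecond)
)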
42 changes: 41 additions & 1 deletion bindings/python/google_benchmark/example.py
@@ -64,7 +64,7 @@ def manual_timing(state):
    while state:
        # Manually count Python CPU time
        start = time.perf_counter()  # perf_counter_ns() in Python 3.7+
        # Something to benchmark
        time.sleep(0.01)
        end = time.perf_counter()
        state.set_iteration_time(end - start)
@@ -92,5 +92,45 @@ def custom_counters(state):
    state.counters["foo_avg_rate"] = Counter(num_foo, Counter.kAvgThreadsRate)


@benchmark.register
@benchmark.option.measure_process_cpu_time()
@benchmark.option.use_real_time()
def with_options(state):
    while state:
        sum(range(1_000_000))


@benchmark.register(name="sum_million_microseconds")
@benchmark.option.unit(benchmark.kMicrosecond)
def with_options(state):
    while state:
        sum(range(1_000_000))


@benchmark.register
@benchmark.option.arg(100)
@benchmark.option.arg(1000)
def passing_argument(state):
    while state:
        sum(range(state.range(0)))


@benchmark.register
@benchmark.option.range(8, limit=8 << 10)
def using_range(state):
    while state:
        sum(range(state.range(0)))


@benchmark.register
@benchmark.option.range_multiplier(2)
@benchmark.option.range(1 << 10, 1 << 18)
@benchmark.option.complexity(benchmark.oN)
def computing_complexity(state):
    while state:
        sum(range(state.range(0)))
    state.complexity_n = state.range(0)


if __name__ == "__main__":
    benchmark.main()