-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathbench.py
154 lines (108 loc) · 3.84 KB
/
bench.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
import inspect
import itertools
from collections.abc import Callable, Iterator
from dataclasses import dataclass
from decimal import Decimal
from statistics import mean
from timeit import timeit
from typing import Annotated, Any, ClassVar, Self
from tabulate import tabulate
from typer import Option, Typer
from injection import inject, injectable
@dataclass(frozen=True, slots=True)
class Benchmark:
x: Decimal
y: Decimal
@property
def difference_rate(self) -> Decimal:
return (self.y - self.x) / self.x
@classmethod
def compare(
cls,
x: Callable[..., Any],
y: Callable[..., Any],
number: int = 1,
) -> Self:
time_x = mean(cls._time_in_microseconds(x, number))
time_y = mean(cls._time_in_microseconds(y, number))
return cls(time_x, time_y)
@staticmethod
def _time_in_microseconds(
callable_: Callable[..., Any],
number: int,
) -> Iterator[Decimal]:
for _ in range(number):
delta = timeit(callable_, number=1)
yield Decimal(delta) * (10**6)
@dataclass(frozen=True, slots=True)
class BenchmarkResult:
    """A titled benchmark outcome, renderable as one table row."""

    title: str
    benchmark: Benchmark

    @property
    def row(self) -> tuple[str, str, str, str]:
        """Format (title, reference time, compared time, verdict) for tabulate."""
        rate = self.benchmark.difference_rate
        # A non-negative rate means the second callable was slower.
        if rate >= 0:
            verdict = f"{rate:.2f} times slower"
        else:
            verdict = f"{abs(rate):.2f} times faster"
        return (
            self.title,
            f"{self.benchmark.x:.2f}μs",
            f"{self.benchmark.y:.2f}μs",
            verdict,
        )
@dataclass(frozen=True, slots=True)
class InjectBenchmark:
    """Measures the overhead of `@inject` against hand-wired dependency resolution."""

    # Registry of benchmark subjects, keyed by a human-readable title.
    # Shared across all instances (ClassVar); populated via `register`.
    callables: ClassVar[dict[str, Callable[..., Any]]] = {}

    def run(self, number: int = 1) -> Iterator[BenchmarkResult]:
        """Yield two results per registered callable.

        For each callable, a `reference` closure instantiates the annotated
        dependency classes directly. It is compared first against a cold
        `inject(...)` call (which includes one-time wiring cost), then against
        a pre-built, warmed-up injected wrapper (steady-state cost only).
        """
        for title, callable_ in self.callables.items():
            signature = inspect.signature(callable_, eval_str=True)
            # Map parameter name -> annotated dependency class.
            dependencies = {
                name: parameter.annotation
                for name, parameter in signature.parameters.items()
            }

            def reference() -> Any:
                # Hand-wired baseline: construct every dependency directly.
                return callable_(**{name: d() for name, d in dependencies.items()})

            # Cold run: `inject` is applied inside the timed lambda, so its
            # one-time setup cost is included in the measurement.
            first = Benchmark.compare(reference, lambda: inject(callable_)(), number)
            yield BenchmarkResult(f"{title} (first run)", first)

            # Warm the wrapper once so only steady-state call overhead is timed.
            injected = inject(callable_)
            injected()
            instance = Benchmark.compare(reference, injected, number)
            yield BenchmarkResult(title, instance)

    @classmethod
    def register(
        cls,
        wrapped: Callable[..., Any] | None = None,  # was annotated non-optional with a None default
        /,
        *,
        title: str,
    ) -> Callable[..., Any]:
        """Record a callable under `title` in the benchmark registry.

        Supports both usages: `@register(title=...)` (decorator factory) and
        `register(func, title=...)` (direct call).
        """

        def decorator(wp: Callable[..., Any]) -> Callable[..., Any]:
            cls.callables[title] = wp
            return wp

        # Identity check, not truthiness: a callable is always truthy, but
        # being explicit distinguishes "no wrapped argument" unambiguously.
        return decorator(wrapped) if wrapped is not None else decorator
# Empty marker classes registered with the third-party `injection` container.
# The benchmarked functions below request these by type annotation; each class
# exists only so the injector has concrete dependencies to resolve.
@injectable
class A: ...
@injectable
class B: ...
@injectable
class C: ...
@injectable
class D: ...
@injectable
class E: ...
# Benchmark subjects: identical no-op functions differing only in how many
# injectable dependencies they declare, so the table isolates per-dependency
# resolution cost. NOTE(review): the dunder-prefixed parameter names look
# intended to keep the parameters private/positional-only for the injector —
# confirm against the `injection` package's resolution rules.
@InjectBenchmark.register(title="0 dependency")
def function_with_0_dependency(): ...
@InjectBenchmark.register(title="1 dependency")
def function_with_1_dependency(__a: A): ...
@InjectBenchmark.register(title="2 dependencies")
def function_with_2_dependencies(__a: A, __b: B): ...
@InjectBenchmark.register(title="3 dependencies")
def function_with_3_dependencies(__a: A, __b: B, __c: C): ...
@InjectBenchmark.register(title="4 dependencies")
def function_with_4_dependencies(__a: A, __b: B, __c: C, __d: D): ...
@InjectBenchmark.register(title="5 dependencies")
def function_with_5_dependencies(__a: A, __b: B, __c: C, __d: D, __e: E): ...
cli = Typer()


@cli.command()
def main(number: Annotated[int, Option("--number", "-n", min=0)] = 1000) -> None:
    """Run every registered benchmark `number` times and print a summary table.

    `number` is the per-callable repetition count passed through to
    `InjectBenchmark.run`.
    """
    benchmark = InjectBenchmark()
    results = benchmark.run(number)
    headers = ("", "Reference Time (μs)", "@inject Time (μs)", "Difference Rate (×)")
    # `results` is a lazy iterator; the generator feeds it straight to tabulate.
    # (Dropped the original `itertools.chain(results)` — chaining a single
    # iterable is a no-op wrapper.)
    data = (result.row for result in results)
    table = tabulate(data, headers=headers)
    print(table)


if __name__ == "__main__":
    cli()