timer.py
from time import time
from collections import defaultdict
import functools

from .running_stats import RunningStats

# Global registry of per-function timing statistics, keyed by function name.
FunctionStats: defaultdict = defaultdict(RunningStats)


def time_func(*outer_args, **outer_kwargs):
    """Decorator factory that measures the runtime of the wrapped function.

    Keyword arguments:
        print_rec: print accumulated statistics every `print_rec` calls (default 100).
        measure_runtime: timing only happens when this is True (default False).
        name: key used in FunctionStats; defaults to the wrapped function's __name__.
    """
    print_rec = outer_kwargs.get("print_rec", 100)
    measure_runtime = outer_kwargs.get("measure_runtime", False)
    name = outer_kwargs.get("name", "")

    def time_func_inner(func):
        @functools.wraps(func)
        def func_wrapper(*args, **kwargs):
            if measure_runtime:
                func_name = name if name else func.__name__
                # Once at least one sample exists, report statistics every `print_rec` calls.
                if FunctionStats.get(func_name, None) is not None:
                    if FunctionStats[func_name].n % print_rec == 0:
                        stats = FunctionStats[func_name]
                        stats_mean = stats.mean()
                        print(
                            "{} run in {} | {} over {} runs".format(
                                func_name, stats_mean, stats_mean * stats.n, stats.n
                            )
                        )
                # print('{} run in {} +/- {} over {} runs'.format(func.__name__, stats.mean(), stats.std(), stats.n))
                # Time this call and push the elapsed time into the running statistics.
                t0 = time()
                out = func(*args, **kwargs)
                diff = time() - t0
                FunctionStats[func_name].push(diff)
                return out
            else:
                return func(*args, **kwargs)

        return func_wrapper

    return time_func_inner


@time_func(print_rec=50, measure_runtime=True)
def do_nothing():
    pass


def iteration():
    for _ in range(10000):
        do_nothing()


if __name__ == "__main__":
    iteration()
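

# --- Illustrative sketch only (not part of the original file) -----------------
# The decorator above relies on a RunningStats accumulator imported from
# running_stats.py, which is not shown here. The class below is a minimal
# sketch, assuming only the interface this file actually uses (push(), n,
# mean(), plus the std() referenced in the commented-out print); the real
# implementation in the repository may differ. It uses Welford's online
# algorithm to keep a running mean and variance.

# class RunningStats:
#     def __init__(self):
#         self.n = 0          # number of samples pushed so far
#         self._mean = 0.0    # running mean
#         self._m2 = 0.0      # running sum of squared deviations from the mean
#
#     def push(self, x):
#         # Incrementally update count, mean, and squared-deviation sum.
#         self.n += 1
#         delta = x - self._mean
#         self._mean += delta / self.n
#         self._m2 += delta * (x - self._mean)
#
#     def mean(self):
#         return self._mean
#
#     def std(self):
#         # Sample standard deviation; 0.0 until there are at least two samples.
#         return (self._m2 / (self.n - 1)) ** 0.5 if self.n > 1 else 0.0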