Skip to content

Commit 671a821

Browse files
twiecki and redbopo
authored
Downstream 1312 (#34)
* Replace time.time with time.perf_counter Co-authored-by: redbopo <[email protected]>
1 parent 4deacac commit 671a821

File tree

22 files changed

+144
-142
lines changed

22 files changed

+144
-142
lines changed

pytensor/compile/profiling.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ def extended_open(filename, mode="r"):
4343

4444
logger = logging.getLogger("pytensor.compile.profiling")
4545

46-
pytensor_imported_time: float = time.time()
46+
pytensor_imported_time: float = time.perf_counter()
4747
total_fct_exec_time: float = 0.0
4848
total_graph_rewrite_time: float = 0.0
4949
total_time_linker: float = 0.0
@@ -165,7 +165,7 @@ def print_global_stats():
165165
print(
166166
(
167167
"Global stats: ",
168-
f"Time elasped since PyTensor import = {time.time() - pytensor_imported_time:6.3f}s, "
168+
f"Time elasped since PyTensor import = {time.perf_counter() - pytensor_imported_time:6.3f}s, "
169169
f"Time spent in PyTensor functions = {total_fct_exec_time:6.3f}s, "
170170
"Time spent compiling PyTensor functions: "
171171
f"rewriting = {total_graph_rewrite_time:6.3f}s, linking = {total_time_linker:6.3f}s ",
@@ -831,7 +831,7 @@ def summary_globals(self, file):
831831
f"Time in all call to pytensor.grad() {pytensor.gradient.grad_time:e}s",
832832
file=file,
833833
)
834-
total_time = time.time() - pytensor_imported_time
834+
total_time = time.perf_counter() - pytensor_imported_time
835835
print(f"Time since pytensor import {total_time:.3f}s", file=file)
836836

837837
def summary_memory(self, file, N=None):
@@ -1299,9 +1299,9 @@ def compute_max_stats(running_memory, stats):
12991299
# Config: whether print min memory peak
13001300
if config.profiling__min_peak_memory:
13011301
node_list = fgraph.apply_nodes
1302-
ttt = time.time()
1302+
ttt = time.perf_counter()
13031303
min_peak = count_minimum_peak(node_list, fgraph, nodes_mem)
1304-
min_peak_time += time.time() - ttt
1304+
min_peak_time += time.perf_counter() - ttt
13051305
min_max_peak = max(min_max_peak, min_peak)
13061306

13071307
del fgraph, nodes_mem

pytensor/gradient.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -492,7 +492,7 @@ def grad(
492492
respect to the output, then a zero variable is returned.
493493
494494
"""
495-
t0 = time.time()
495+
t0 = time.perf_counter()
496496

497497
if cost is None:
498498
if known_grads is None:
@@ -643,7 +643,7 @@ def handle_disconnected(var):
643643
else:
644644
assert return_disconnected.lower() == "disconnected"
645645

646-
t1 = time.time()
646+
t1 = time.perf_counter()
647647
global grad_time
648648
grad_time += t1 - t0
649649

pytensor/graph/features.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -473,7 +473,7 @@ def validate_(self, fgraph):
473473
exception. replace_all_validate will print out the
474474
verbose output. Or it has to be done here before raise.
475475
"""
476-
t0 = time.time()
476+
t0 = time.perf_counter()
477477
try:
478478
ret = fgraph.execute_callbacks("validate")
479479
except Exception as e:
@@ -494,7 +494,7 @@ def validate_(self, fgraph):
494494
reason = uf_info.function
495495
print(f"validate failed on node {r}.\n Reason: {reason}, {e}")
496496
raise
497-
t1 = time.time()
497+
t1 = time.perf_counter()
498498
if fgraph.profile:
499499
fgraph.profile.validate_time += t1 - t0
500500
return ret

pytensor/graph/fg.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -717,7 +717,7 @@ def execute_callbacks(self, name: str, *args, **kwargs) -> None:
717717
a method called after name.
718718
719719
"""
720-
t0 = time.time()
720+
t0 = time.perf_counter()
721721
for feature in self._features:
722722
try:
723723
fn = getattr(feature, name)
@@ -726,10 +726,10 @@ def execute_callbacks(self, name: str, *args, **kwargs) -> None:
726726
# try; the AttributeError really must come from feature.${name}
727727
# not existing
728728
continue
729-
tf0 = time.time()
729+
tf0 = time.perf_counter()
730730
fn(self, *args, **kwargs)
731-
self.execute_callbacks_times[feature] += time.time() - tf0
732-
self.execute_callbacks_time += time.time() - t0
731+
self.execute_callbacks_times[feature] += time.perf_counter() - tf0
732+
self.execute_callbacks_time += time.perf_counter() - t0
733733

734734
def collect_callbacks(self, name: str, *args) -> Dict[Feature, Any]:
735735
"""Collects callbacks

pytensor/graph/rewriting/basic.py

Lines changed: 27 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -298,9 +298,9 @@ def apply(self, fgraph):
298298
for rewriter in self.data:
299299
try:
300300
nb_nodes_before = len(fgraph.apply_nodes)
301-
t0 = time.time()
301+
t0 = time.perf_counter()
302302
sub_prof = rewriter.apply(fgraph)
303-
l.append(float(time.time() - t0))
303+
l.append(float(time.perf_counter() - t0))
304304
sub_profs.append(sub_prof)
305305
nb_nodes.append((nb_nodes_before, len(fgraph.apply_nodes)))
306306
if fgraph.profile:
@@ -701,7 +701,7 @@ def add_requirements(self, fgraph):
701701
def apply(self, fgraph):
702702
sched = fgraph.merge_feature.scheduled
703703
nb_fail = 0
704-
t0 = time.time()
704+
t0 = time.perf_counter()
705705
if fgraph.profile:
706706
validate_before = fgraph.profile.validate_time
707707
callback_before = fgraph.execute_callbacks_time
@@ -807,7 +807,7 @@ def apply(self, fgraph):
807807

808808
return (
809809
nb_fail,
810-
time.time() - t0,
810+
time.perf_counter() - t0,
811811
validate_time,
812812
callback_time,
813813
callbacks_time,
@@ -1066,9 +1066,9 @@ def get_rewrites(self, node):
10661066
return self.track_dict[type(node.op)]
10671067

10681068
def time_call(self, fn):
1069-
start = time.time()
1069+
start = time.perf_counter()
10701070
fn()
1071-
return time.time() - start
1071+
return time.perf_counter() - start
10721072

10731073

10741074
class FromFunctionNodeRewriter(NodeRewriter):
@@ -1303,9 +1303,9 @@ def transform(self, fgraph, node):
13031303

13041304
new_repl = None
13051305
for rewrite in rewrites:
1306-
rewrite_start = time.time()
1306+
rewrite_start = time.perf_counter()
13071307
new_repl = rewrite.transform(fgraph, node)
1308-
rewrite_finish = time.time()
1308+
rewrite_finish = time.perf_counter()
13091309
if self.profile:
13101310
self.time_rewrites[rewrite] += rewrite_start - rewrite_finish
13111311
self.process_count[rewrite] += 1
@@ -2026,9 +2026,9 @@ def apply(self, fgraph, start_from=None):
20262026
start_from = fgraph.outputs
20272027
callback_before = fgraph.execute_callbacks_time
20282028
nb_nodes_start = len(fgraph.apply_nodes)
2029-
t0 = time.time()
2029+
t0 = time.perf_counter()
20302030
q = deque(io_toposort(fgraph.inputs, start_from))
2031-
io_t = time.time() - t0
2031+
io_t = time.perf_counter() - t0
20322032

20332033
def importer(node):
20342034
if node is not current_node:
@@ -2039,7 +2039,7 @@ def importer(node):
20392039
)
20402040
nb = 0
20412041
try:
2042-
t0 = time.time()
2042+
t0 = time.perf_counter()
20432043
while q:
20442044
if self.order == "out_to_in":
20452045
node = q.pop()
@@ -2049,7 +2049,7 @@ def importer(node):
20492049
continue
20502050
current_node = node
20512051
nb += self.process_node(fgraph, node)
2052-
loop_t = time.time() - t0
2052+
loop_t = time.perf_counter() - t0
20532053
finally:
20542054
self.detach_updater(fgraph, u)
20552055

@@ -2367,9 +2367,9 @@ def apply_cleanup(profs_dict):
23672367
for crewriter in self.cleanup_rewriters:
23682368
change_tracker.reset()
23692369
nb = change_tracker.nb_imported
2370-
t_rewrite = time.time()
2370+
t_rewrite = time.perf_counter()
23712371
sub_prof = crewriter.apply(fgraph)
2372-
time_rewriters[crewriter] += time.time() - t_rewrite
2372+
time_rewriters[crewriter] += time.perf_counter() - t_rewrite
23732373
profs_dict[crewriter].append(sub_prof)
23742374
if change_tracker.changed:
23752375
process_count.setdefault(crewriter, 0)
@@ -2381,7 +2381,7 @@ def apply_cleanup(profs_dict):
23812381

23822382
while changed and not max_use_abort:
23832383
process_count = {}
2384-
t0 = time.time()
2384+
t0 = time.perf_counter()
23852385
changed = False
23862386
iter_cleanup_sub_profs = {}
23872387
for crewrite in self.cleanup_rewriters:
@@ -2392,9 +2392,9 @@ def apply_cleanup(profs_dict):
23922392
for grewrite in self.global_rewriters:
23932393
change_tracker.reset()
23942394
nb = change_tracker.nb_imported
2395-
t_rewrite = time.time()
2395+
t_rewrite = time.perf_counter()
23962396
sub_prof = grewrite.apply(fgraph)
2397-
time_rewriters[grewrite] += time.time() - t_rewrite
2397+
time_rewriters[grewrite] += time.perf_counter() - t_rewrite
23982398
sub_profs.append(sub_prof)
23992399
if change_tracker.changed:
24002400
process_count.setdefault(grewrite, 0)
@@ -2409,13 +2409,13 @@ def apply_cleanup(profs_dict):
24092409
)
24102410
global_sub_profs.append(sub_profs)
24112411

2412-
global_rewriter_timing.append(float(time.time() - t0))
2412+
global_rewriter_timing.append(float(time.perf_counter() - t0))
24132413

24142414
changed |= apply_cleanup(iter_cleanup_sub_profs)
24152415

2416-
topo_t0 = time.time()
2416+
topo_t0 = time.perf_counter()
24172417
q = deque(io_toposort(fgraph.inputs, start_from))
2418-
io_toposort_timing.append(time.time() - topo_t0)
2418+
io_toposort_timing.append(time.perf_counter() - topo_t0)
24192419

24202420
nb_nodes.append(len(q))
24212421
max_nb_nodes = max(max_nb_nodes, len(q))
@@ -2443,11 +2443,11 @@ def chin(node, i, r, new_r, reason):
24432443
current_node = node
24442444
for node_rewriter in self.node_tracker.get_trackers(node.op):
24452445
nb = change_tracker.nb_imported
2446-
t_rewrite = time.time()
2446+
t_rewrite = time.perf_counter()
24472447
node_rewriter_change = self.process_node(
24482448
fgraph, node, node_rewriter
24492449
)
2450-
time_rewriters[node_rewriter] += time.time() - t_rewrite
2450+
time_rewriters[node_rewriter] += time.perf_counter() - t_rewrite
24512451
if not node_rewriter_change:
24522452
continue
24532453
process_count.setdefault(node_rewriter, 0)
@@ -2469,13 +2469,13 @@ def chin(node, i, r, new_r, reason):
24692469

24702470
# Apply final rewriters
24712471
sub_profs = []
2472-
t_before_final_rewrites = time.time()
2472+
t_before_final_rewrites = time.perf_counter()
24732473
for grewrite in self.final_rewriters:
24742474
change_tracker.reset()
24752475
nb = change_tracker.nb_imported
2476-
t_rewrite = time.time()
2476+
t_rewrite = time.perf_counter()
24772477
sub_prof = grewrite.apply(fgraph)
2478-
time_rewriters[grewrite] += time.time() - t_rewrite
2478+
time_rewriters[grewrite] += time.perf_counter() - t_rewrite
24792479
sub_profs.append(sub_prof)
24802480
if change_tracker.changed:
24812481
process_count.setdefault(grewrite, 0)
@@ -2490,7 +2490,7 @@ def chin(node, i, r, new_r, reason):
24902490
)
24912491
final_sub_profs.append(sub_profs)
24922492

2493-
global_rewriter_timing[-1] += time.time() - t_before_final_rewrites
2493+
global_rewriter_timing[-1] += time.perf_counter() - t_before_final_rewrites
24942494

24952495
changed |= apply_cleanup(iter_cleanup_sub_profs)
24962496

@@ -2504,7 +2504,7 @@ def chin(node, i, r, new_r, reason):
25042504
cleanup_sub_profs.append(c_sub_profs)
25052505

25062506
loop_process_count.append(process_count)
2507-
loop_timing.append(float(time.time() - t0))
2507+
loop_timing.append(float(time.perf_counter() - t0))
25082508

25092509
end_nb_nodes = len(fgraph.apply_nodes)
25102510

pytensor/link/c/cmodule.py

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -326,11 +326,11 @@ def dlimport(fullpath, suffix=None):
326326
global import_time
327327
try:
328328
importlib.invalidate_caches()
329-
t0 = time.time()
329+
t0 = time.perf_counter()
330330
with warnings.catch_warnings():
331331
warnings.filterwarnings("ignore", message="numpy.ndarray size changed")
332332
rval = __import__(module_name, {}, {}, [module_name])
333-
t1 = time.time()
333+
t1 = time.perf_counter()
334334
import_time += t1 - t0
335335
if not rval:
336336
raise Exception("__import__ failed", fullpath)
@@ -771,7 +771,7 @@ def refresh(self, age_thresh_use=None, delete_if_problem=False, cleanup=True):
771771
"""
772772
if age_thresh_use is None:
773773
age_thresh_use = self.age_thresh_use
774-
start_time = time.time()
774+
start_time = time.perf_counter()
775775
too_old_to_use = []
776776

777777
to_delete = []
@@ -786,7 +786,7 @@ def rmtree_empty(*args, **kwargs):
786786
to_delete_empty.append((args, kwargs))
787787

788788
# add entries that are not in the entry_from_key dictionary
789-
time_now = time.time()
789+
time_now = time.perf_counter()
790790
# Go through directories in alphabetical order to ensure consistent
791791
# behavior.
792792
try:
@@ -956,7 +956,7 @@ def unpickle_failure():
956956
# directories in alphabetical order so as to make
957957
# sure all new processes only use the first one.
958958
if cleanup:
959-
age = time.time() - last_access_time(entry)
959+
age = time.perf_counter() - last_access_time(entry)
960960
if delete_if_problem or age > self.age_thresh_del:
961961
rmtree(
962962
root,
@@ -1063,7 +1063,9 @@ def unpickle_failure():
10631063
if not files:
10641064
_rmtree(*a, **kw)
10651065

1066-
_logger.debug(f"Time needed to refresh cache: {time.time() - start_time}")
1066+
_logger.debug(
1067+
f"Time needed to refresh cache: {time.perf_counter() - start_time}"
1068+
)
10671069

10681070
return too_old_to_use
10691071

@@ -1269,7 +1271,7 @@ def check_key(self, key, key_pkl):
12691271
Its associated pickled file containing a KeyData.
12701272
12711273
"""
1272-
start_time = time.time()
1274+
start_time = time.perf_counter()
12731275
# Verify that when we reload the KeyData from the pickled file, the
12741276
# same key can be found in it, and is not equal to more than one
12751277
# other key.
@@ -1317,7 +1319,7 @@ def check_key(self, key, key_pkl):
13171319
f"The keys are:\n {other}\nand\n {key}\n(found in {key_pkl})."
13181320
)
13191321

1320-
self.time_spent_in_check_key += time.time() - start_time
1322+
self.time_spent_in_check_key += time.perf_counter() - start_time
13211323

13221324
# default 31 days
13231325
age_thresh_del = config.cmodule__age_thresh_use + 60 * 60 * 24 * 7
@@ -1506,7 +1508,7 @@ def clear_unversioned(self, min_age=None):
15061508
assert key[0]
15071509

15081510
to_del = []
1509-
time_now = time.time()
1511+
time_now = time.perf_counter()
15101512
for filename in os.listdir(self.dirname):
15111513
if filename.startswith("tmp"):
15121514
try:

0 commit comments

Comments
 (0)