@@ -13,9 +13,8 @@
 from collections.abc import Callable, Iterable, Sequence
 from functools import _compose_mro, partial  # type: ignore
 from itertools import chain
-from typing import TYPE_CHECKING, Literal
+from typing import Literal
 
-import pytensor
 from pytensor.configdefaults import config
 from pytensor.graph import destroyhandler as dh
 from pytensor.graph.basic import (
@@ -30,15 +29,12 @@
 from pytensor.graph.features import AlreadyThere, Feature
 from pytensor.graph.fg import FunctionGraph, Output
 from pytensor.graph.op import Op
+from pytensor.graph.rewriting.unify import Var, convert_strs_to_vars
 from pytensor.graph.utils import AssocList, InconsistencyError
 from pytensor.misc.ordered_set import OrderedSet
 from pytensor.utils import flatten
 
 
-if TYPE_CHECKING:
-    from pytensor.graph.rewriting.unify import Var
-
-
 _logger = logging.getLogger("pytensor.graph.rewriting.basic")
 
 RemoveKeyType = Literal["remove"]
|
@@ -1406,8 +1402,6 @@ def __init__(
         frequent `Op`, which will prevent the rewrite from being tried as often.
 
         """
-        from pytensor.graph.rewriting.unify import convert_strs_to_vars
-
        var_map: dict[str, Var] = {}
         self.in_pattern = convert_strs_to_vars(in_pattern, var_map=var_map)
         self.out_pattern = convert_strs_to_vars(out_pattern, var_map=var_map)
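
For reference, `convert_strs_to_vars` is what turns the string placeholders in a pattern into shared logic `Var`s, so the same name on both sides of a rewrite binds to the same matched subgraph. A minimal, hypothetical usage sketch of `PatternNodeRewriter` (the rewrite shown is illustrative, not one defined in this module):

from pytensor.graph.rewriting.basic import PatternNodeRewriter
from pytensor.tensor import exp, log

# "x" is converted to a single logic Var shared by both patterns,
# so the matched input of exp is returned directly as the output.
log_exp_cancel = PatternNodeRewriter(
    (log, (exp, "x")),  # in_pattern: log(exp(x))
    "x",                # out_pattern: x
)
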
|
@@ -1449,9 +1443,6 @@ def transform(self, fgraph, node, get_nodes=True):
             if ret is not False and ret is not None:
                 return dict(zip(real_node.outputs, ret, strict=True))
 
-        if node.op != self.op:
-            return False
-
         if len(node.outputs) != 1:
             # PatternNodeRewriter doesn't support replacing multi-output nodes
             return False
|
@@ -1480,11 +1471,13 @@ def transform(self, fgraph, node, get_nodes=True):
 
         [old_out] = node.outputs
         if not old_out.type.is_super(ret.type):
+            from pytensor.tensor.type import TensorType
+
             # Type doesn't match
             if not (
                 self.allow_cast
-                and isinstance(old_out.type, pytensor.tensor.TensorType)
-                and isinstance(ret.type, pytensor.tensor.TensorType)
+                and isinstance(old_out.type, TensorType)
+                and isinstance(ret.type, TensorType)
             ):
                 return False
 
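
A hedged note on `allow_cast` (the parameter name comes from the code above; the example rewrite and the casting behavior described are assumptions drawn from that name): when the replacement's type does not subsume the original output's type, the match is normally rejected, but with `allow_cast=True` and both types being `TensorType`s the rewriter can proceed, presumably by casting the replacement back to the expected dtype.

from pytensor.graph.rewriting.basic import PatternNodeRewriter
from pytensor.tensor import neg

# Hypothetical rewrite: -(-x) -> x. If the dtypes disagree only at the
# TensorType level, allow_cast=True keeps the match alive instead of
# returning False as in the branch above.
double_neg = PatternNodeRewriter(
    (neg, (neg, "x")),
    "x",
    allow_cast=True,
)
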
|
@@ -2736,10 +2729,12 @@ def check_stack_trace(f_or_fgraph, ops_to_check="last", bug_print="raise"):
     otherwise.
 
     """
-    if isinstance(f_or_fgraph, pytensor.compile.function.types.Function):
-        fgraph = f_or_fgraph.maker.fgraph
-    elif isinstance(f_or_fgraph, pytensor.graph.fg.FunctionGraph):
+    from pytensor.compile.function.types import Function
+
+    if isinstance(f_or_fgraph, FunctionGraph):
         fgraph = f_or_fgraph
+    elif isinstance(f_or_fgraph, Function):
+        fgraph = f_or_fgraph.maker.fgraph
     else:
         raise ValueError("The type of f_or_fgraph is not supported")
 
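
For completeness, a hedged sketch of calling `check_stack_trace` with either accepted input type (the graph contents are illustrative, not from this commit):

import pytensor
import pytensor.tensor as pt
from pytensor.graph.rewriting.basic import check_stack_trace

x = pt.vector("x")
f = pytensor.function([x], pt.exp(x))

# Both forms reach the same FunctionGraph after the isinstance dispatch:
ok_from_function = check_stack_trace(f, ops_to_check="last")
ok_from_fgraph = check_stack_trace(f.maker.fgraph, ops_to_check="last")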