import warnings
from copy import deepcopy
from functools import singledispatch
from types import NoneType

import mlx.core as mx
import numpy as np

from pytensor.compile.ops import DeepCopyOp
from pytensor.graph import Constant
from pytensor.graph.fg import FunctionGraph
from pytensor.link.utils import fgraph_to_python
from pytensor.raise_op import Assert, CheckAndRaise


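# `mlx_typify` converts raw Python/NumPy data into values that MLX can
# consume, dispatching on the type of the input.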
@singledispatch
def mlx_typify(data, **kwargs):
    raise NotImplementedError(f"mlx_typify is not implemented for {type(data)}")


@mlx_typify.register(np.ndarray)
def mlx_typify_tensor(data, dtype=None, **kwargs):
    return mx.array(data, dtype=dtype)


@mlx_typify.register(slice)
@mlx_typify.register(NoneType)
@mlx_typify.register(mx.array)
def mlx_typify_no_conversion_needed(data, **kwargs):
    return data


@mlx_typify.register(int)
@mlx_typify.register(float)
def mlx_typify_python_scalar(data, **kwargs):
    return mx.array(data)


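# Booleans pass through as plain Python `bool`s rather than being wrapped in
# an `mx.array`, so they can still be used in Python-level control flow.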
@mlx_typify.register(bool)
@mlx_typify.register(np.bool_)
def mlx_typify_bool(data, **kwargs):
    return bool(data)


@mlx_typify.register(np.integer)
@mlx_typify.register(np.floating)
@mlx_typify.register(np.complexfloating)
def mlx_typify_numpy_scalar(data, **kwargs):
    return mx.array(data)


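# `mlx_funcify` turns a PyTensor `Op` (or a whole `FunctionGraph`) into an
# equivalent Python callable built from MLX operations; per-`Op` conversions
# register themselves on this dispatcher.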
@singledispatch
def mlx_funcify(op, node=None, storage_map=None, **kwargs):
    """Create an MLX-compatible function from a PyTensor `Op`."""
    raise NotImplementedError(
        f"No MLX conversion for the given `Op`: {op}.\n"
        "Check out `https://github.com/pymc-devs/pytensor/issues/1350` for progress "
        "or to request that we prioritize this operation."
    )


@mlx_funcify.register(FunctionGraph)
def mlx_funcify_FunctionGraph(
    fgraph,
    node=None,
    fgraph_name="mlx_funcified_fgraph",
    conversion_func=mlx_funcify,
    **kwargs,
):
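    # `fgraph_to_python` forwards extra kwargs to every per-node conversion
    # call, so nested conversions see the same `conversion_func`.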
    built_kwargs = {"conversion_func": conversion_func, **kwargs}
    return fgraph_to_python(
        fgraph,
        conversion_func,
        type_conversion_fn=mlx_typify,
        fgraph_name=fgraph_name,
        **built_kwargs,
    )


@mlx_funcify.register(DeepCopyOp)
def mlx_funcify_DeepCopyOp(op, **kwargs):
    def deepcopyop(x):
        return deepcopy(x)

    return deepcopyop


@mlx_funcify.register(Assert)
@mlx_funcify.register(CheckAndRaise)
def mlx_funcify_CheckAndRaise(op, node, **kwargs):
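    # Conditions that are compile-time constants can be checked right away;
    # a constant False means the assertion could never pass.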
    conds = node.inputs[1:]
    if any(isinstance(cond, Constant) and not bool(cond.data) for cond in conds):
        raise op.exc_type(op.msg)

    warnings.warn(
        f"Skipping `{type(op).__name__}` Op (assertion: {op.msg}) as MLX tracing would remove it.",
        stacklevel=2,
    )

    def assert_fn(x, *inputs):
        return x

    return assert_fn
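
# A minimal usage sketch (hypothetical graph; assumes the relevant `Op`
# conversions, e.g. for `Elemwise`, are registered elsewhere in this backend):
#
#     import pytensor.tensor as pt
#     x = pt.vector("x")
#     fgraph = FunctionGraph([x], [2 * x])
#     fn = mlx_funcify(fgraph)       # Python callable built from mlx ops
#     (result,) = fn(mx.array([1.0, 2.0, 3.0]))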