5 changes: 5 additions & 0 deletions dace/data.py
@@ -387,6 +387,9 @@ def __init__(self,

self.members = OrderedDict(members)
for k, v in self.members.items():
if isinstance(v, dtypes.typeclass):
v = Scalar(v)
self.members[k] = v
v.transient = transient

self.name = name
@@ -402,6 +405,8 @@ def __init__(self,
elif isinstance(v, Scalar):
symbols |= v.free_symbols
fields_and_types[k] = v.dtype
elif isinstance(v, dtypes.typeclass):
fields_and_types[k] = v
elif isinstance(v, (sp.Basic, symbolic.SymExpr)):
symbols |= v.free_symbols
fields_and_types[k] = symbolic.symtype(v)
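
The dace/data.py change above normalizes Structure members that are given as bare typeclasses (e.g. dace.int32) into Scalar descriptors, so the struct dtype exposes them as proper fields. A minimal sketch of the resulting behavior, assuming a DaCe build that includes this change (the MyStruct name mirrors the loop test added later in this PR):

import dace

# 'length' is passed as a bare typeclass rather than a data descriptor.
MyStruct = dace.data.Structure({'data': dace.float64[20], 'length': dace.int32}, name='MyStruct')

# The constructor wraps the typeclass in a Scalar, so downstream code can rely
# on every member being a data descriptor with a .dtype.
assert isinstance(MyStruct.members['length'], dace.data.Scalar)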
18 changes: 12 additions & 6 deletions dace/frontend/python/newast.py
@@ -1852,7 +1852,7 @@ def _parse_map_inputs(self, name: str, params: List[Tuple[str, str]],
if symbolic.issymbolic(atom, self.sdfg.constants):
# Check for undefined variables
atomstr = str(atom)
if atomstr not in self.defined:
if atomstr not in self.defined and atomstr not in self.sdfg.arrays:
raise DaceSyntaxError(self, node, 'Undefined variable "%s"' % atom)
# Add to global SDFG symbols

@@ -2350,7 +2350,7 @@ def visit_For(self, node: ast.For):
if symbolic.issymbolic(atom, self.sdfg.constants):
astr = str(atom)
# Check for undefined variables
if astr not in self.defined:
if astr not in self.defined and not ('.' in astr and astr in self.sdfg.arrays):
raise DaceSyntaxError(self, node, 'Undefined variable "%s"' % atom)
# Add to global SDFG symbols if not a scalar
if (astr not in self.sdfg.symbols and not (astr in self.variables or astr in self.sdfg.arrays)):
@@ -3079,8 +3079,14 @@ def _add_access(
else:
var_name = self.sdfg.temp_data_name()

parent_name = self.scope_vars[name]
parent_array = self.scope_arrays[parent_name]
parent_name = self.scope_vars[until(name, '.')]
if '.' in name:
struct_field = name[name.index('.'):]
parent_name += struct_field
scope_ndict = dace.sdfg.NestedDict(self.scope_arrays)
parent_array = scope_ndict[parent_name]
else:
parent_array = self.scope_arrays[parent_name]

has_indirection = (_subset_has_indirection(rng, self) or _subset_is_local_symbol_dependent(rng, self))
if has_indirection:
@@ -3244,7 +3250,7 @@ def _add_write_access(self,
return self.accesses[(name, rng, 'w')]
elif name in self.variables:
return (self.variables[name], rng)
elif (name, rng, 'r') in self.accesses or name in self.scope_vars:
elif (name, rng, 'r') in self.accesses or until(name, '.') in self.scope_vars:
return self._add_access(name, rng, 'w', target, new_name, arr_type)
else:
raise NotImplementedError
@@ -3498,7 +3504,7 @@ def _visit_assign(self, node, node_target, op, dtype=None, is_return=False):
raise IndexError('Boolean array indexing cannot be combined with indirect access')

if self.nested and not new_data:
new_name, new_rng = self._add_write_access(name, rng, target)
new_name, new_rng = self._add_write_access(true_name, rng, target)
# Local symbol or local data dependent
if _subset_is_local_symbol_dependent(rng, self):
new_rng = rng
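
Taken together, the newast.py changes above allow dotted struct-field names (such as S.length) to pass the undefined-variable checks and to resolve through their parent structure when accessed from nested scopes. A minimal usage sketch, mirroring the new test_for_with_field test added below (the program and field names are illustrative):

import dace

struct = dace.data.Structure({'data': dace.float64[20], 'length': dace.int32}, name='MyStruct')

@dace.program
def increment_prefix(S: struct):
    # S.length is a struct field used as a loop bound; the frontend now resolves
    # it instead of raising DaceSyntaxError('Undefined variable').
    for i in range(S.length):
        S.data[i] = S.data[i] + 1.0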
8 changes: 8 additions & 0 deletions dace/symbolic.py
@@ -307,6 +307,8 @@ def symlist(values):
except TypeError:
values = [values]

skip = set()

for expr in values:
if isinstance(expr, SymExpr):
true_expr = expr.expr
@@ -315,6 +317,12 @@
else:
continue
for atom in sympy.preorder_traversal(true_expr):
if atom in skip:
continue
if isinstance(atom, Attr):
# Skip attributes
skip.add(atom.args[1])
continue
if isinstance(atom, symbol):
result[atom.name] = atom
return result
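
The symlist change above keeps the attribute name of an Attr node (a struct-field access such as S.length) out of the returned free-symbol dictionary, so field names are not registered as SDFG symbols. A hedged sketch of the intended effect; the Attr(object, attribute) constructor form is inferred from the args[1] use in the hunk and is an assumption:

from dace.symbolic import Attr, symbol, symlist

# Build an expression resembling 'S.length + i'.
expr = Attr(symbol('S'), symbol('length')) + symbol('i')

# The attribute name ('length', args[1] of Attr) is placed in the skip set and
# no longer appears in the result; 'S' and 'i' still do.
print(symlist(expr))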
3 changes: 2 additions & 1 deletion tests/codegen/allocation_lifetime_test.py
@@ -206,6 +206,7 @@ def persistentmem(output: dace.int32[1]):
del csdfg


@pytest.mark.skip(reason="In v1, produces two tasklets side-by-side, leading to nondeterministic code order")
def test_alloc_persistent_threadlocal():

@dace.program
@@ -599,7 +600,7 @@ def test_multisize():
test_persistent_gpu_transpose_regression()
test_alloc_persistent_register()
test_alloc_persistent()
test_alloc_persistent_threadlocal()
# test_alloc_persistent_threadlocal()
test_alloc_persistent_threadlocal_naming()
test_alloc_multistate()
test_nested_view_samename()
18 changes: 18 additions & 0 deletions tests/python_frontend/loops_test.py
@@ -5,6 +5,7 @@

from dace.frontend.python.common import DaceSyntaxError


@dace.program
def for_loop():
A = dace.ndarray([10], dtype=dace.int32)
@@ -499,6 +500,22 @@ def test_branch_in_while():
assert len(sdfg.source_nodes()) == 1


def test_for_with_field():
struct = dace.data.Structure({'data': dace.float64[20], 'length': dace.int32}, name='MyStruct')

@dace.program
def for_with_field(S: struct):
for i in range(S.length):
S.data[i] = S.data[i] + 1.0

A = np.random.rand(20)
inp_struct = struct.dtype.base_type.as_ctypes()(data=A.__array_interface__['data'][0], length=10)
expected = np.copy(A)
expected[:10] += 1.0
for_with_field.compile()(S=inp_struct)
assert np.allclose(A, expected)


if __name__ == "__main__":
test_for_loop()
test_for_loop_with_break_continue()
@@ -522,3 +539,4 @@ def test_branch_in_while():
test_while_else()
test_branch_in_for()
test_branch_in_while()
test_for_with_field()
139 changes: 114 additions & 25 deletions tests/python_frontend/structures/structure_python_test.py
@@ -1,4 +1,5 @@
# Copyright 2019-2023 ETH Zurich and the DaCe authors. All rights reserved.
import ctypes
import dace
import numpy as np
import pytest
@@ -18,7 +19,7 @@ def csr_to_dense_python(A: CSR, B: dace.float32[M, N]):
for i in dace.map[0:M]:
for idx in dace.map[A.indptr[i]:A.indptr[i + 1]]:
B[i, A.indices[idx]] = A.data[idx]

rng = np.random.default_rng(42)
A = sparse.random(20, 20, density=0.1, format='csr', dtype=np.float32, random_state=rng)
B = np.zeros((20, 20), dtype=np.float32)
@@ -41,7 +42,7 @@ def test_write_structure():
M, N, nnz = (dace.symbol(s) for s in ('M', 'N', 'nnz'))
CSR = dace.data.Structure(dict(indptr=dace.int32[M + 1], indices=dace.int32[nnz], data=dace.float32[nnz]),
name='CSRMatrix')

@dace.program
def dense_to_csr_python(A: dace.float32[M, N], B: CSR):
idx = 0
@@ -53,7 +54,7 @@ def dense_to_csr_python(A: dace.float32[M, N], B: CSR):
B.indices[idx] = j
idx += 1
B.indptr[M] = idx

rng = np.random.default_rng(42)
tmp = sparse.random(20, 20, density=0.1, format='csr', dtype=np.float32, random_state=rng)
A = tmp.toarray()
@@ -75,7 +76,7 @@ def test_local_structure():
M, N, nnz = (dace.symbol(s) for s in ('M', 'N', 'nnz'))
CSR = dace.data.Structure(dict(indptr=dace.int32[M + 1], indices=dace.int32[nnz], data=dace.float32[nnz]),
name='CSRMatrix')

@dace.program
def dense_to_csr_local_python(A: dace.float32[M, N], B: CSR):
tmp = dace.define_local_structure(CSR)
@@ -91,7 +92,7 @@ def dense_to_csr_local_python(A: dace.float32[M, N], B: CSR):
B.indptr[:] = tmp.indptr[:]
B.indices[:] = tmp.indices[:]
B.data[:] = tmp.data[:]

rng = np.random.default_rng(42)
tmp = sparse.random(20, 20, density=0.1, format='csr', dtype=np.float32, random_state=rng)
A = tmp.toarray()
@@ -118,12 +119,11 @@ def __init__(self, diag, upper, lower):
self.lower = lower

n, nblocks = dace.symbol('n'), dace.symbol('nblocks')
BlockTriDiagonal = dace.data.Structure(
dict(diagonal=dace.complex128[nblocks, n, n],
upper=dace.complex128[nblocks, n, n],
lower=dace.complex128[nblocks, n, n]),
name='BlockTriDiagonalMatrix')

BlockTriDiagonal = dace.data.Structure(dict(diagonal=dace.complex128[nblocks, n, n],
upper=dace.complex128[nblocks, n, n],
lower=dace.complex128[nblocks, n, n]),
name='BlockTriDiagonalMatrix')

@dace.program
def rgf_leftToRight(A: BlockTriDiagonal, B: BlockTriDiagonal, n_: dace.int32, nblocks_: dace.int32):

@@ -139,42 +139,41 @@ def rgf_leftToRight(A: BlockTriDiagonal, B: BlockTriDiagonal, n_: dace.int32, nb
# 2. Forward substitution
# From left to right
for i in range(1, nblocks_):
tmp[i] = np.linalg.inv(A.diagonal[i] - A.lower[i-1] @ tmp[i-1] @ A.upper[i-1])
tmp[i] = np.linalg.inv(A.diagonal[i] - A.lower[i - 1] @ tmp[i - 1] @ A.upper[i - 1])
# 3. Initialisation of last element of B
B.diagonal[-1] = tmp[-1]

# 4. Backward substitution
# From right to left

for i in range(nblocks_-2, -1, -1):
B.diagonal[i] = tmp[i] @ (identity + A.upper[i] @ B.diagonal[i+1] @ A.lower[i] @ tmp[i])
B.upper[i] = -tmp[i] @ A.upper[i] @ B.diagonal[i+1]
B.lower[i] = np.transpose(B.upper[i])
for i in range(nblocks_ - 2, -1, -1):
B.diagonal[i] = tmp[i] @ (identity + A.upper[i] @ B.diagonal[i + 1] @ A.lower[i] @ tmp[i])
B.upper[i] = -tmp[i] @ A.upper[i] @ B.diagonal[i + 1]
B.lower[i] = np.transpose(B.upper[i])

rng = np.random.default_rng(42)

A_diag = rng.random((10, 20, 20)) + 1j * rng.random((10, 20, 20))
A_upper = rng.random((10, 20, 20)) + 1j * rng.random((10, 20, 20))
A_lower = rng.random((10, 20, 20)) + 1j * rng.random((10, 20, 20))
A_lower = rng.random((10, 20, 20)) + 1j * rng.random((10, 20, 20))
inpBTD = BlockTriDiagonal.dtype._typeclass.as_ctypes()(diagonal=A_diag.__array_interface__['data'][0],
upper=A_upper.__array_interface__['data'][0],
lower=A_lower.__array_interface__['data'][0])

B_diag = np.zeros((10, 20, 20), dtype=np.complex128)
B_upper = np.zeros((10, 20, 20), dtype=np.complex128)
B_lower = np.zeros((10, 20, 20), dtype=np.complex128)
outBTD = BlockTriDiagonal.dtype._typeclass.as_ctypes()(diagonal=B_diag.__array_interface__['data'][0],
upper=B_upper.__array_interface__['data'][0],
lower=B_lower.__array_interface__['data'][0])

func = rgf_leftToRight.compile()
func(A=inpBTD, B=outBTD, n_=A_diag.shape[1], nblocks_=A_diag.shape[0], n=A_diag.shape[1], nblocks=A_diag.shape[0])

A = BTD(A_diag, A_upper, A_lower)
B = BTD(np.zeros((10, 20, 20), dtype=np.complex128),
np.zeros((10, 20, 20), dtype=np.complex128),
B = BTD(np.zeros((10, 20, 20), dtype=np.complex128), np.zeros((10, 20, 20), dtype=np.complex128),
np.zeros((10, 20, 20), dtype=np.complex128))

rgf_leftToRight.f(A, B, A_diag.shape[1], A_diag.shape[0])

assert np.allclose(B.diagonal, B_diag)
@@ -195,15 +194,15 @@ def csr_to_dense_python(A: CSR, B: dace.float32[M, N]):
for i in dace.map[0:M]:
for idx in dace.map[A.indptr[i]:A.indptr[i + 1]]:
B[i, A.indices[idx]] = A.data[idx]

rng = np.random.default_rng(42)
A = sparse.random(20, 20, density=0.1, format='csr', dtype=np.float32, random_state=rng)
ref = A.toarray()

inpA = CSR.dtype._typeclass.as_ctypes()(indptr=A.indptr.__array_interface__['data'][0],
indices=A.indices.__array_interface__['data'][0],
data=A.data.__array_interface__['data'][0])

# TODO: The following doesn't work because we need to create a Structure data descriptor from the ctypes class.
# csr_to_dense_python(inpA, B)
naive = csr_to_dense_python.to_sdfg(simplify=False)
@@ -224,9 +223,99 @@ def csr_to_dense_python(A: CSR, B: dace.float32[M, N]):
assert np.allclose(B, ref)


def test_write_structure_in_map():
M = dace.symbol('M')
N = dace.symbol('N')
Bundle = dace.data.Structure(members={
"data": dace.data.Array(dace.float32, (M, N)),
"size": dace.data.Scalar(dace.int64)
},
name="BundleType")

@dace.program
def init_prog(bundle: Bundle, fill_value: int) -> None:
for index in dace.map[0:bundle.size]:
bundle.data[index, :] = fill_value

data = np.zeros((10, 5), dtype=np.float32)
fill_value = 42
inp_struct = Bundle.dtype.base_type.as_ctypes()(
data=data.__array_interface__['data'][0],
size=9,
)
ref = np.zeros((10, 5), dtype=np.float32)
ref[:9, :] = fill_value

init_prog.compile()(inp_struct, fill_value, M=10, N=5)

assert np.allclose(data, ref)


def test_readwrite_structure_in_map():
M = dace.symbol('M')
N = dace.symbol('N')
Bundle = dace.data.Structure(members={
"data": dace.data.Array(dace.float32, (M, N)),
"data2": dace.data.Array(dace.float32, (M, N)),
"size": dace.data.Scalar(dace.int64)
},
name="BundleTypeTwoArrays")

@dace.program
def copy_prog(bundle: Bundle) -> None:
for index in dace.map[0:bundle.size]:
bundle.data[index, :] = bundle.data2[index, :] + 5

data = np.zeros((10, 5), dtype=np.float32)
data2 = np.ones((10, 5), dtype=np.float32)
inp_struct = Bundle.dtype.base_type.as_ctypes()(
data=data.__array_interface__['data'][0],
data2=data2.__array_interface__['data'][0],
size=ctypes.c_int64(6),
)
ref = np.zeros((10, 5), dtype=np.float32)
ref[:6, :] = 6.0

csdfg = copy_prog.compile()
csdfg.fast_call((ctypes.byref(inp_struct), ctypes.c_int(5)), (ctypes.c_int(5),))

assert np.allclose(data, ref)


def test_write_structure_in_loop():
M = dace.symbol('M')
N = dace.symbol('N')
Bundle = dace.data.Structure(members={
"data": dace.data.Array(dace.float32, (M, N)),
"size": dace.data.Scalar(dace.int64)
},
name="BundleType")

@dace.program
def init_prog(bundle: Bundle, fill_value: int) -> None:
for index in range(bundle.size):
bundle.data[index, :] = fill_value

data = np.zeros((10, 5), dtype=np.float32)
fill_value = 42
inp_struct = Bundle.dtype.base_type.as_ctypes()(
data=data.__array_interface__['data'][0],
size=6,
)
ref = np.zeros((10, 5), dtype=np.float32)
ref[:6, :] = fill_value

init_prog.compile()(inp_struct, fill_value, M=10, N=5)

assert np.allclose(data, ref)


if __name__ == '__main__':
test_read_structure()
test_write_structure()
test_local_structure()
test_rgf()
# test_read_structure_gpu()
test_write_structure_in_map()
test_readwrite_structure_in_map()
test_write_structure_in_loop()