
Commit 96febcf

Implement PR feedback for Luka
1 parent 214f8da commit 96febcf

File tree

- CHANGELOG.md
- DeeployTest/generateNetwork.py
- DeeployTest/testUtils/codeGenerate.py
- DeeployTest/testUtils/testRunner.py
- DeeployTest/testUtils/typeMapping.py

5 files changed: +87 −74 lines


CHANGELOG.md

Lines changed: 2 additions & 1 deletion
```diff
@@ -10,6 +10,7 @@ This file contains the changelog for the Deeploy project. The changelog is divid
 
 ### Added
 - Add manual type inference feature (CLI: `--input-type-map`/`--input-offset-map`) to resolve ambiguities when test inputs are not representative enough
+- Added a `testTypeInferenceDifferentTypes` test case to validate type inference for different input types
 - Added `_mangleNodeNames` function to avoid duplicate node mappings
 - Output Docker image digests per platform (`amd64`, `arm64`) after build, which is used to construct the multi-arch Docker manifest. This prevents registry clutter caused by unnecessary per-architecture Docker tags.
 
@@ -21,7 +22,7 @@ This file contains the changelog for the Deeploy project. The changelog is divid
 - Resolved issue with missing `id` in the `Build Cache for Docker` step, used in the `Inject build-cache` step.
 
 ### Removed
--
+- Delete outdated and unused `.gitlab-ci.yml` file
 
 ## Release v0.2.0 (2025-07-08) [#103](https://github.com/pulp-platform/Deeploy/pull/103)
 This release contains major architectural changes, new platform support, enhanced simulation workflows, floating-point kernel support, training infrastructure for CCT models, memory allocation strategies, and documentation improvements.
```

DeeployTest/generateNetwork.py

Lines changed: 43 additions & 27 deletions
```diff
@@ -39,7 +39,7 @@
 from testUtils.typeMapping import inferInputType, parseDataType
 
 from Deeploy.AbstractDataTypes import PointerClass
-from Deeploy.CommonExtensions.DataTypes import int8_t
+from Deeploy.CommonExtensions.DataTypes import IntegerDataTypes
 from Deeploy.CommonExtensions.OptimizationPasses.TopologyOptimizationPasses.DebugPasses import EmulateCMSISRequantPass
 from Deeploy.DeeployTypes import _NoVerbosity
 from Deeploy.Targets.CortexM.Platform import CMSISPlatform
@@ -62,39 +62,38 @@ def generateNetwork(args):
     else:
         activations = None
 
-    tensors = graph.tensors()
-
     # build {name, type} and {name, offset} maps
     manual_types = {}
    manual_offsets = {}
     for kv in args.input_type_map:
         try:
             name, tstr = kv.split('=', 1)
-        except ValueError:
-            raise ValueError(f"Invalid --input-type-map entry '{kv}'. Expected NAME=TYPE.")
+        except ValueError as exc:
+            raise ValueError(f"Invalid --input-type-map entry '{kv}'. Expected NAME=TYPE.") from exc
         name, tstr = name.strip(), tstr.strip()
         try:
-            manual_types[name] = PointerClass(parseDataType(tstr))
-        except ValueError as e:
-            raise ValueError(f"Invalid --input-type-map entry '{kv}': {e}")
+            manual_types[name] = parseDataType(tstr)
+        except ValueError as exc:
+            raise ValueError(f"Invalid --input-type-map entry '{kv}': {exc}") from exc
     for kv in args.input_offset_map:
         try:
             name, ostr = kv.split('=', 1)
-        except ValueError:
-            raise ValueError(f"Invalid --input-offset-map entry '{kv}'. Expected NAME=OFFSET.")
+        except ValueError as exc:
+            raise ValueError(f"Invalid --input-offset-map entry '{kv}'. Expected NAME=OFFSET.") from exc
         name, ostr = name.strip(), ostr.strip()
         try:
             manual_offsets[name] = int(ostr)
-        except ValueError:
-            raise ValueError(f"Invalid --input-offset-map entry '{kv}': OFFSET must be an integer.")
+        except ValueError as exc:
+            raise ValueError(f"Invalid --input-offset-map entry '{kv}': OFFSET must be an integer.") from exc
 
     # Sanity check for unknown input names
-    npz_names = set(inputs.files)
-    bad_names = (set(manual_types) | set(manual_offsets)) - npz_names
-    if bad_names:
-        raise ValueError(f"Unknown input names in overrides: {bad_names}")
-
-    manual_keys = set(manual_types) | set(manual_offsets)
+    manual_keys = set(manual_types)
+    assert manual_keys == set(
+        manual_offsets
+    ), f"Override inputs should have both type and offset specified. Inputs without both specified: {manual_keys ^ set(manual_offsets)}"
+    assert manual_keys <= set(
+        inputs.files
+    ), f"Unknown input names in overrides: {manual_keys - set(inputs.files)} (Valid names are: {set(inputs.files)})"
 
     if args.debug:
         test_inputs, test_outputs, graph = generateDebugConfig(inputs, outputs, activations, graph)
@@ -114,15 +113,30 @@ def generateNetwork(args):
     for index, (name, num) in enumerate(zip(inputs.files, test_inputs)):
         if np.prod(num.shape) == 0:
             continue
-        defaultType = manual_types.get(name, PointerClass(int8_t))
-        defaultOffset = manual_offsets.get(name, 0)
-        autoInfer = name not in manual_keys
-
-        _type, offset = inferInputType(num,
-                                       signProp,
-                                       defaultType = defaultType,
-                                       defaultOffset = defaultOffset,
-                                       autoInfer = autoInfer)[0]
+
+        if name in manual_keys:
+            _type = manual_types[name]
+            offset = manual_offsets[name]
+
+            # Check if the provided values fit into the dereferenced type
+            vals = num.astype(np.int64) - offset
+            if not _type.checkPromotion(vals):
+                lo, hi = _type.typeMin, _type.typeMax
+                raise RuntimeError(f"Provided type '{_type.typeName}' with offset {offset} "
+                                   f"does not match input values in range [{vals.min()}, {vals.max()}] "
+                                   f"(expected range [{lo}, {hi}])")
+
+            # Suggest a smaller fitting type if possible
+            fitting_types = [t for t in sorted(IntegerDataTypes, key = lambda x: x.typeWidth) if t.checkPromotion(vals)]
+            if fitting_types and fitting_types[0] is not _type:
+                print(f"WARNING: Data spans [{int(vals.min())}, {int(vals.max())}], "
+                      f"which would fit in '{fitting_types[0].typeName}', "
+                      f"but user forced '{_type.typeName}'.")
+
+            _type = PointerClass(_type)
+        else:
+            _type, offset = inferInputType(num, signProp)[0]
+
         inputTypes[f"input_{index}"] = _type
         inputOffsets[f"input_{index}"] = offset
 
@@ -198,12 +212,14 @@ def generateNetwork(args):
     parser.add_argument('--input-type-map',
                         nargs = '*',
                         default = [],
+                        type = str,
                         help = '(Optional) mapping of input names to data types. '
                         'If not specified, types are inferred from the input data. '
                         'Example: --input-type-map input_0=int8_t input_1=float32_t ...')
     parser.add_argument('--input-offset-map',
                         nargs = '*',
                         default = [],
+                        type = str,
                        help = '(Optional) mapping of input names to offsets. '
                         'If not specified, offsets are set to 0. '
                         'Example: --input-offset-map input_0=0 input_1=128 ...')
```
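
For context, here is a minimal, self-contained sketch of the override parsing and cross-checking logic introduced above. It is illustrative only: `parse_overrides` and the `known_types` set are hypothetical stand-ins, and the real code resolves types through Deeploy's `parseDataType` and its data-type classes rather than plain strings.

```python
# Hypothetical, self-contained sketch of the NAME=TYPE / NAME=OFFSET override handling.
# `known_types` is a stand-in; the real code uses testUtils.typeMapping.parseDataType.

def parse_overrides(type_map, offset_map, input_names):
    known_types = {"int8_t", "uint8_t", "int16_t", "int32_t", "float32_t"}  # illustrative only

    manual_types, manual_offsets = {}, {}
    for kv in type_map:
        try:
            name, tstr = kv.split('=', 1)
        except ValueError as exc:
            raise ValueError(f"Invalid --input-type-map entry '{kv}'. Expected NAME=TYPE.") from exc
        name, tstr = name.strip(), tstr.strip()
        if tstr not in known_types:
            raise ValueError(f"Invalid --input-type-map entry '{kv}': unknown type '{tstr}'")
        manual_types[name] = tstr

    for kv in offset_map:
        try:
            name, ostr = kv.split('=', 1)
        except ValueError as exc:
            raise ValueError(f"Invalid --input-offset-map entry '{kv}'. Expected NAME=OFFSET.") from exc
        try:
            manual_offsets[name.strip()] = int(ostr)
        except ValueError as exc:
            raise ValueError(f"Invalid --input-offset-map entry '{kv}': OFFSET must be an integer.") from exc

    # Every overridden input needs both a type and an offset, and must exist in the test data.
    manual_keys = set(manual_types)
    assert manual_keys == set(manual_offsets), \
        f"Override inputs should have both type and offset specified: {manual_keys ^ set(manual_offsets)}"
    assert manual_keys <= set(input_names), \
        f"Unknown input names in overrides: {manual_keys - set(input_names)}"
    return manual_types, manual_offsets


print(parse_overrides(["input_0=int8_t"], ["input_0=128"], ["input_0", "input_1"]))
# -> ({'input_0': 'int8_t'}, {'input_0': 128})
```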

DeeployTest/testUtils/codeGenerate.py

Lines changed: 3 additions & 3 deletions
```diff
@@ -104,7 +104,7 @@ def generateTestInputsHeader(deployer: NetworkDeployer,
     print('Input:')
     for name in inputTypes.keys():
         buf = deployer.ctxt.lookup(name)
-        print(f" - '{name}': Type: {buf._type.typeName}, Offset: {inputOffsets[name]}")
+        print(f" - '{name}': Type: {buf._type.referencedType.typeName}, Offset: {inputOffsets[name]}")
 
     return retStr
 
@@ -172,10 +172,10 @@ def generateTestOutputsHeader(deployer: NetworkDeployer,
     if signProp:
         for (name, buf), (_, n_level), (_, signed) in zip(output_data_type.items(), output_n_levels.items(),
                                                           output_signed.items()):
-            print(f" - '{name}': Type: {buf.typeName}, nLevels: {n_level}, Signed: {signed}")
+            print(f" - '{name}': Type: {buf.referencedType.typeName}, nLevels: {n_level}, Signed: {signed}")
     else:
         for (name, buf) in output_data_type.items():
-            print(f" - '{name}': Type: {buf.typeName}")
+            print(f" - '{name}': Type: {buf.referencedType.typeName}")
 
     return retStr
 
```
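
The three print changes above exist because the buffers carry a pointer type built via `PointerClass`, while the human-readable scalar name lives on its `referencedType`. A small hedged sketch follows; it assumes a Deeploy checkout is importable, the attribute names are taken from the diff above, and the exact pointer-level `typeName` string is not guaranteed.

```python
# Sketch only: requires Deeploy on the PYTHONPATH; attribute names follow the diff above.
from Deeploy.AbstractDataTypes import PointerClass
from Deeploy.CommonExtensions.DataTypes import int8_t

ptr_type = PointerClass(int8_t)          # what a buffer's `_type` holds
print(ptr_type.typeName)                 # pointer-level name (exact string depends on Deeploy)
print(ptr_type.referencedType.typeName)  # scalar name now shown in the test headers: 'int8_t'
```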

DeeployTest/testUtils/testRunner.py

Lines changed: 2 additions & 0 deletions
```diff
@@ -180,12 +180,14 @@ def __init__(self, tiling_arguments: bool, description = None):
         self.add_argument('--input-type-map',
                           nargs = '*',
                           default = [],
+                          type = str,
                           help = '(Optional) mapping of input names to data types. '
                           'If not specified, types are inferred from the input data. '
                           'Example: --input-type-map input_0=int8_t input_1=float32_t ...')
         self.add_argument('--input-offset-map',
                           nargs = '*',
                           default = [],
+                          type = str,
                           help = '(Optional) mapping of input names to offsets. '
                           'If not specified, offsets are set to 0. '
                           'Example: --input-offset-map input_0=0 input_1=128 ...')
```
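
For illustration, a standalone argparse sketch (a throwaway parser, not the actual testRunner parser class) showing the shape of the values these flags collect. Note that `type = str` matches argparse's default conversion for string arguments, so the addition mainly makes the intent explicit.

```python
import argparse

# Standalone sketch mirroring the two flags added above (not the real testRunner parser).
parser = argparse.ArgumentParser()
parser.add_argument('--input-type-map', nargs = '*', default = [], type = str)
parser.add_argument('--input-offset-map', nargs = '*', default = [], type = str)

args = parser.parse_args(['--input-type-map', 'input_0=int8_t', 'input_1=float32_t',
                          '--input-offset-map', 'input_0=128', 'input_1=0'])
print(args.input_type_map)    # ['input_0=int8_t', 'input_1=float32_t']
print(args.input_offset_map)  # ['input_0=128', 'input_1=0']
```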

DeeployTest/testUtils/typeMapping.py

Lines changed: 37 additions & 43 deletions
```diff
@@ -24,7 +24,7 @@
 # limitations under the License.
 
 from collections import namedtuple
-from typing import List, Optional
+from typing import List
 
 import numpy as np
 
@@ -84,69 +84,63 @@ def dataWidth(n):
     return ret
 
 
-def inferInputType(_input: np.ndarray,
-                   signProp: Optional[bool] = None,
-                   defaultType = PointerClass(int8_t),
-                   defaultOffset = 0,
-                   autoInfer: bool = True) -> List[offsetType]:
+def inferInputType(values: np.ndarray,
+                   signProp: bool = False,
+                   defaultType = int8_t,
+                   defaultOffset = 0) -> List[offsetType]:
+    """Infers the data type of the provided input array.
 
-    # WIESEP: We cannot do type inference for empty arrays.
-    if np.prod(_input.shape) == 0:
-        print(f"Warning: Empty input array for type inference for {_input}!")
-        return [(defaultType, defaultOffset)]
+    Parameters
+    ----------
+    values : np.ndarray
+        The input array for which to infer the data type.
 
-    # If the caller provided a manual override, skip all inference.
-    if not autoInfer:
-        rawType = defaultType.referencedType
-        vals = (_input.astype(np.int64) - defaultOffset)
-        if not rawType.checkPromotion(vals):
-            lo, hi = rawType.typeMin, rawType.typeMax
-            raise RuntimeError(f"Provided type {rawType.typeName} with offset {defaultOffset} "
-                               f"does not match input values in range [{vals.min()}, {vals.max()}] "
-                               f"(expected range [{lo}, {hi}])")
-
-        smallest = rawType
-        for t in sorted(IntegerDataTypes, key = lambda x: x.typeWidth):
-            if t.checkPromotion(vals):
-                smallest = t
-                break
-        if smallest is not rawType:
-            print(f"WARNING: Data spans [{int(vals.min())}, {int(vals.max())}], "
-                  f"which would fit in {smallest.typeName}, "
-                  f"but user forced {rawType.typeName}.")
-        return [(defaultType, defaultOffset)]
+    signProp : bool
+        Whether to consider signedness when inferring the data type.
+
+    defaultType : type
+        The default data type to return when the input array is empty.
+
+    defaultOffset : int
+        The default offset to return when the input array is empty.
 
-    if signProp is None:
-        signProp = False
+    Returns
+    -------
+    List[offsetType]
+        A list of inferred data types and their corresponding offsets.
+    """
+
+    # WIESEP: We cannot do type inference for empty arrays.
+    if np.prod(values.shape) == 0:
+        print(f"Warning: Empty input array for type inference for {values}!")
+        return [(defaultType, defaultOffset)]
 
     signedPlatformTypes = [_type for _type in IntegerDataTypes if _type.typeMin < 0]
 
     matchingTypes = []
 
-    # FIXME: this is okay for now (3 distinctions are fine), but there is implicit
-    # knowledge encoded in the order of the checks (i.e. first unsigned, signed
-    # and then float). It might be good to extract that implicit knowledge into an ordered list.
-    if signProp and isUnsigned(_input) and isInteger(_input):
+    # There is implicit knowledge encoded in the order of the checks (i.e. first unsigned, signed
+    # and then float).
+    if signProp and isUnsigned(values) and isInteger(values):
         for _type in sorted(signedPlatformTypes, key = lambda x: x.typeWidth):
             signPropOffset = (2**(_type.typeWidth - 1))
-            if _type.checkPromotion(_input - signPropOffset):
+            if _type.checkPromotion(values - signPropOffset):
                 matchingTypes.append(offsetType(PointerClass(_type), signPropOffset))
-    elif isInteger(_input):
+    elif isInteger(values):
         sorted_types = sorted(
             IntegerDataTypes,
             key = lambda t: (t.typeWidth, t.typeMin < 0),
         )
 
-        matchingTypes = []
         for _type in sorted_types:
-            if _type.checkPromotion(_input):
+            if _type.checkPromotion(values):
                 matchingTypes.append(offsetType(PointerClass(_type), 0))
     else:
         for _type in sorted(FloatDataTypes, key = lambda x: x.typeWidth):
-            if _type.checkPromotion(_input):
+            if _type.checkPromotion(values):
                 matchingTypes.append(offsetType(PointerClass(_type), 0))
 
-    if matchingTypes == []:
-        raise Exception("Could not find a matching type!")
+    if not matchingTypes:
+        raise RuntimeError("Could not find a matching type!")
 
     return matchingTypes
```
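
To make the check ordering above concrete (sign-propagated unsigned integers first, then integers by width, then floats), here is a simplified, numpy-only sketch of the same selection idea. It is not Deeploy's implementation: it uses numpy dtypes instead of `IntegerDataTypes`/`FloatDataTypes` and only considers signed widths, but it shows how the offset of 2^(width-1) arises in the sign-propagation case.

```python
import numpy as np

# Simplified illustration of the inference order above, using numpy dtypes
# instead of Deeploy's IntegerDataTypes / FloatDataTypes (signed widths only).
SIGNED_INTS = [np.int8, np.int16, np.int32, np.int64]


def fits(values: np.ndarray, dtype) -> bool:
    info = np.iinfo(dtype)
    return values.min() >= info.min and values.max() <= info.max


def infer_simplified(values: np.ndarray, signProp: bool = False):
    is_int = np.issubdtype(values.dtype, np.integer)
    if signProp and is_int and values.min() >= 0:
        # Sign propagation: shift unsigned data into a signed type's range.
        for t in SIGNED_INTS:
            offset = 2**(np.iinfo(t).bits - 1)
            if fits(values.astype(np.int64) - offset, t):
                return t, offset
    if is_int:
        # Plain integer data: pick the narrowest type that holds the values.
        for t in SIGNED_INTS:
            if fits(values, t):
                return t, 0
    return np.float32, 0  # non-integer data falls through to float


print(infer_simplified(np.array([0, 255]), signProp = True))  # numpy.int8 with offset 128
print(infer_simplified(np.array([-3, 100])))                  # numpy.int8 with offset 0
```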
