Skip to content

Commit c9dbc2d

Browse files
gijzelaerr and claude committed
Fix issues in symbolic addressing: maps, docstring, source detection
- Move getter/setter dispatch maps to module-level constants to avoid rebuilding them on every read/write call
- Move set_lword from _INT_SETTER_MAP to _SIMPLE_SETTER_MAP so its value is not cast through int() redundantly
- Fix read_many docstring to honestly describe individual reads instead of claiming batched/grouped behavior
- Extract _read_source() helper for from_csv/from_json that checks for newlines before falling back to filesystem path detection, preventing inline content from accidentally matching an existing file
- Document get_word type annotation mismatch (returns int at runtime despite bytearray annotation in getters.py)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 1afa1ed commit c9dbc2d

File tree

1 file changed

+94
-70
lines changed

1 file changed

+94
-70
lines changed

snap7/util/symbols.py

Lines changed: 94 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,65 @@
7474

7575
logger = getLogger(__name__)
7676

77+
# ---------------------------------------------------------------------------
# Module-level getter/setter dispatch maps (built once, not per call)
# ---------------------------------------------------------------------------

# Maps an upper-cased S7 type name to the snap7.util getter that decodes a
# value of that type from a bytearray at a byte offset.  Declared at module
# level so the read path does not rebuild this dict on every lookup.
_GETTER_MAP: Dict[str, Any] = {
    "BYTE": get_byte,
    "SINT": get_sint,
    "USINT": get_usint,
    "CHAR": get_char,
    "INT": get_int,
    "UINT": get_uint,
    # NOTE: get_word is annotated as returning bytearray but actually returns
    # int at runtime (struct.unpack(">H", ...) -> int). It behaves correctly.
    "WORD": get_word,
    "DATE": get_date,
    "DINT": get_dint,
    "UDINT": get_udint,
    "DWORD": get_dword,
    "REAL": get_real,
    "TIME": get_time,
    # "TOD" and "TIME_OF_DAY" are aliases for the same getter, as are
    # "DATE_AND_TIME" and "DT".
    "TOD": get_tod,
    "TIME_OF_DAY": get_tod,
    "DATE_AND_TIME": get_dt,
    "DT": get_dt,
    "LREAL": get_lreal,
    "LWORD": get_lword,
    "WCHAR": get_wchar,
    "DTL": get_dtl,
}
106+
107+
# Setters that cast value to int before calling
# (the write path invokes these as setter(data, offset, int(value)), so any
# numeric or numeric-string value is coerced before encoding).
_INT_SETTER_MAP: Dict[str, Any] = {
    "BYTE": set_byte,
    "SINT": set_sint,
    "USINT": set_usint,
    "INT": set_int,
    "UINT": set_uint,
    "WORD": set_word,
    "DINT": set_dint,
    "UDINT": set_udint,
    "DWORD": set_dword,
}
119+
120+
# Setters that pass value through without casting
# (the write path invokes these as setter(data, offset, value); each setter
# accepts its natural Python type — float, str, datetime, etc. — directly).
_SIMPLE_SETTER_MAP: Dict[str, Any] = {
    "REAL": set_real,
    "LREAL": set_lreal,
    "CHAR": set_char,
    "WCHAR": set_wchar,
    "TIME": set_time,
    "DATE": set_date,
    # "TOD"/"TIME_OF_DAY" and "DATE_AND_TIME"/"DT" are alias pairs sharing
    # one setter each.
    "TOD": set_tod,
    "TIME_OF_DAY": set_tod,
    "DATE_AND_TIME": set_dt,
    "DT": set_dt,
    "DTL": set_dtl,
    # set_lword takes its value as-is; it lives here (not in the int-casting
    # map) so the value is not redundantly passed through int().
    "LWORD": set_lword,
}
135+
77136
# Mapping from S7 type name to the number of bytes needed to read
78137
_TYPE_SIZE: Dict[str, int] = {
79138
"BOOL": 1,
@@ -104,6 +163,25 @@
104163
_STRING_RE = re.compile(r"^(STRING|WSTRING|FSTRING)\[(\d+)]$", re.IGNORECASE)
105164

106165

166+
def _read_source(source: Union[str, Path]) -> str:
167+
"""Resolve *source* to text content.
168+
169+
If *source* is a :class:`~pathlib.Path` it is always read as a file.
170+
If it is a string that contains a newline character it is treated as
171+
inline content (CSV / JSON). Otherwise the string is checked as a
172+
file path and read if it exists; if not it is returned verbatim.
173+
"""
174+
if isinstance(source, Path):
175+
return source.read_text()
176+
s = str(source)
177+
if "\n" in s:
178+
return s
179+
path = Path(s)
180+
if path.exists():
181+
return path.read_text()
182+
return s
183+
184+
107185
@dataclass(frozen=True)
108186
class TagAddress:
109187
"""Resolved address for a single PLC tag."""
@@ -199,16 +277,13 @@ def from_csv(cls, source: Union[str, Path]) -> "SymbolTable":
199277
An optional ``bit`` column overrides the bit index parsed from the offset.
200278
201279
Args:
202-
source: path to a CSV file, or a CSV-formatted string.
280+
source: path to a CSV file, or a CSV-formatted string. Strings
281+
that contain newlines are always treated as inline CSV content.
203282
204283
Returns:
205284
A new :class:`SymbolTable`.
206285
"""
207-
path = Path(source)
208-
if path.exists():
209-
text = path.read_text()
210-
else:
211-
text = str(source)
286+
text = _read_source(source)
212287

213288
reader = csv.DictReader(io.StringIO(text))
214289
tags: Dict[str, Dict[str, Any]] = {}
@@ -232,16 +307,13 @@ def from_json(cls, source: Union[str, Path]) -> "SymbolTable":
232307
each with keys ``db``, ``offset``, ``type``, and optionally ``bit``.
233308
234309
Args:
235-
source: path to a JSON file, or a JSON-formatted string.
310+
source: path to a JSON file, or a JSON-formatted string. Strings
311+
that contain newlines are always treated as inline JSON content.
236312
237313
Returns:
238314
A new :class:`SymbolTable`.
239315
"""
240-
path = Path(source)
241-
if path.exists():
242-
text = path.read_text()
243-
else:
244-
text = str(source)
316+
text = _read_source(source)
245317

246318
data: Dict[str, Dict[str, Any]] = json.loads(text)
247319
return cls(data)
@@ -321,7 +393,10 @@ def write(self, client: Client, tag: str, value: Any) -> None:
321393
client.db_write(addr.db, addr.byte_offset, data)
322394

323395
def read_many(self, client: Client, tags: list[str]) -> Dict[str, ValueType]:
324-
"""Read multiple tags, grouping reads by DB where possible.
396+
"""Read multiple tags individually and return them as a dictionary.
397+
398+
This is a convenience method that reads each tag one at a time via
399+
:meth:`read`. It does **not** batch or group reads.
325400
326401
Args:
327402
client: a connected :class:`~snap7.client.Client`.
@@ -361,32 +436,8 @@ def _get_value(data: bytearray, base_offset: int, addr: TagAddress) -> ValueType
361436
elif kind == "WSTRING":
362437
return get_wstring(data, offset)
363438

364-
_getter_map: Dict[str, Any] = {
365-
"BYTE": get_byte,
366-
"SINT": get_sint,
367-
"USINT": get_usint,
368-
"CHAR": get_char,
369-
"INT": get_int,
370-
"UINT": get_uint,
371-
"WORD": get_word,
372-
"DATE": get_date,
373-
"DINT": get_dint,
374-
"UDINT": get_udint,
375-
"DWORD": get_dword,
376-
"REAL": get_real,
377-
"TIME": get_time,
378-
"TOD": get_tod,
379-
"TIME_OF_DAY": get_tod,
380-
"DATE_AND_TIME": get_dt,
381-
"DT": get_dt,
382-
"LREAL": get_lreal,
383-
"LWORD": get_lword,
384-
"WCHAR": get_wchar,
385-
"DTL": get_dtl,
386-
}
387-
388-
if upper in _getter_map:
389-
return _getter_map[upper](data, offset) # type: ignore[no-any-return]
439+
if upper in _GETTER_MAP:
440+
return _GETTER_MAP[upper](data, offset) # type: ignore[no-any-return]
390441

391442
raise ValueError(f"Unsupported S7 type for reading: {addr.type}")
392443

@@ -416,39 +467,12 @@ def _set_value(data: bytearray, base_offset: int, addr: TagAddress, value: Any)
416467
set_wstring(data, offset, str(value), length)
417468
return
418469

419-
_int_setter_map: Dict[str, Any] = {
420-
"BYTE": set_byte,
421-
"SINT": set_sint,
422-
"USINT": set_usint,
423-
"INT": set_int,
424-
"UINT": set_uint,
425-
"WORD": set_word,
426-
"DINT": set_dint,
427-
"UDINT": set_udint,
428-
"DWORD": set_dword,
429-
"LWORD": set_lword,
430-
}
431-
432-
if upper in _int_setter_map:
433-
_int_setter_map[upper](data, offset, int(value))
470+
if upper in _INT_SETTER_MAP:
471+
_INT_SETTER_MAP[upper](data, offset, int(value))
434472
return
435473

436-
_simple_setter_map: Dict[str, Any] = {
437-
"REAL": set_real,
438-
"LREAL": set_lreal,
439-
"CHAR": set_char,
440-
"WCHAR": set_wchar,
441-
"TIME": set_time,
442-
"DATE": set_date,
443-
"TOD": set_tod,
444-
"TIME_OF_DAY": set_tod,
445-
"DATE_AND_TIME": set_dt,
446-
"DT": set_dt,
447-
"DTL": set_dtl,
448-
}
449-
450-
if upper in _simple_setter_map:
451-
_simple_setter_map[upper](data, offset, value)
474+
if upper in _SIMPLE_SETTER_MAP:
475+
_SIMPLE_SETTER_MAP[upper](data, offset, value)
452476
return
453477

454478
raise ValueError(f"Unsupported S7 type for writing: {addr.type}")

0 commit comments

Comments
 (0)