Commit 1a9c681

Merge pull request #6123 from MetRonnie/tokens
Tokens: handle ISO 8601 long format cycle points
2 parents: 8690b03 + e1f7205

File tree: 5 files changed (+74, -4 lines)


changes.d/6123.fix.md

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Allow long-format datetime cycle points in IDs used on the command line.
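For example, IDs such as 'workflow//2024-01-01T00:00Z/task' or 'workflow//2024-01-01T00:00:00+05:30/task' (illustrative workflow and task names) are now accepted on the command line, alongside the short form 'workflow//20240101T0000Z/task'.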

cylc/flow/id.py

Lines changed: 1 addition & 1 deletion
@@ -403,7 +403,7 @@ def duplicate(
 # //cycle[:sel][/task[:sel][/job[:sel]]]
 RELATIVE_PATTERN = rf'''
     //
-    (?P<{IDTokens.Cycle.value}>[^~\/:\n]+)
+    (?P<{IDTokens.Cycle.value}>[^~\/:\n][^~\/\n]*?)
     (?:
         :
         (?P<{IDTokens.Cycle.value}_sel>[^\/:\n]+)
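To see the effect of the pattern change in isolation, here is a minimal sketch (using only simplified, anchored stand-ins for the cycle and cycle-selector sub-patterns, not the full RELATIVE_PATTERN): the old cycle pattern cannot get past the extra colons in a long-format datetime at all, while the new non-greedy pattern matches but may hand the final colon-separated chunk to the selector group, which is what the cli_tokenise helper below repairs.

import re

# Simplified stand-ins for the old and new cycle sub-patterns, each followed
# by the optional ":selector" group and anchored to the end of the string.
OLD = re.compile(r'^(?P<cycle>[^~/:\n]+)(?::(?P<sel>[^/:\n]+))?$')
NEW = re.compile(r'^(?P<cycle>[^~/:\n][^~/\n]*?)(?::(?P<sel>[^/:\n]+))?$')

point = '2024-01-01T00:00:00+05:30'  # long format with a numeric UTC offset

print(OLD.match(point))  # None: the old pattern cannot match this at all
m = NEW.match(point)
print(m.group('cycle'), m.group('sel'))  # 2024-01-01T00:00:00+05 30
# The new pattern matches, but splits at the last colon; cli_tokenise
# (added to id_cli.py below) re-joins the two parts when the result is a
# valid ISO 8601 datetime.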

cylc/flow/id_cli.py

Lines changed: 36 additions & 2 deletions
@@ -20,6 +20,9 @@
 import re
 from typing import Optional, Dict, List, Tuple, Any
 
+from metomi.isodatetime.parsers import TimePointParser
+from metomi.isodatetime.exceptions import ISO8601SyntaxError
+
 from cylc.flow import LOG
 from cylc.flow.exceptions import (
     InputError,
@@ -28,6 +31,7 @@
 from cylc.flow.id import (
     Tokens,
     contains_multiple_workflows,
+    tokenise,
     upgrade_legacy_ids,
 )
 from cylc.flow.pathutil import EXPLICIT_RELATIVE_PATH_REGEX
@@ -43,6 +47,36 @@
 
 
 FN_CHARS = re.compile(r'[\*\?\[\]\!]')
+TP_PARSER = TimePointParser()
+
+
+def cli_tokenise(id_: str) -> Tokens:
+    """Tokenise with support for long-format datetimes.
+
+    If a cycle selector is present, it could be part of a long-format
+    ISO 8601 datetime that was erroneously split. Re-attach it if it
+    results in a valid datetime.
+
+    Examples:
+        >>> f = lambda t: {k: v for k, v in t.items() if v is not None}
+        >>> f(cli_tokenise('foo//2021-01-01T00:00Z'))
+        {'workflow': 'foo', 'cycle': '2021-01-01T00:00Z'}
+        >>> f(cli_tokenise('foo//2021-01-01T00:horse'))
+        {'workflow': 'foo', 'cycle': '2021-01-01T00', 'cycle_sel': 'horse'}
+    """
+    tokens = tokenise(id_)
+    cycle = tokens['cycle']
+    cycle_sel = tokens['cycle_sel']
+    if not (cycle and cycle_sel) or '-' not in cycle:
+        return tokens
+    cycle = f'{cycle}:{cycle_sel}'
+    try:
+        TP_PARSER.parse(cycle)
+    except ISO8601SyntaxError:
+        return tokens
+    dict.__setitem__(tokens, 'cycle', cycle)
+    del tokens['cycle_sel']
+    return tokens
 
 
 def _parse_cli(*ids: str) -> List[Tokens]:
@@ -124,14 +158,14 @@ def _parse_cli(*ids: str) -> List[Tokens]:
     tokens_list: List[Tokens] = []
     for id_ in ids:
         try:
-            tokens = Tokens(id_)
+            tokens = cli_tokenise(id_)
         except ValueError:
             if id_.endswith('/') and not id_.endswith('//'):  # noqa: SIM106
                 # tolerate IDs that end in a single slash on the CLI
                 # (e.g. CLI auto completion)
                 try:
                     # this ID is invalid with or without the trailing slash
-                    tokens = Tokens(id_[:-1])
+                    tokens = cli_tokenise(id_[:-1])
                 except ValueError:
                     raise InputError(f'Invalid ID: {id_}')
             else:
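A rough usage sketch of the repaired tokenisation from the caller's side (assuming this branch of cylc-flow is installed so that tokenise and the new cli_tokenise are importable; the workflow and task names are made up):

from cylc.flow.id import tokenise
from cylc.flow.id_cli import cli_tokenise

id_ = 'my_workflow//2024-01-01T00:00:00+05:30/my_task'

# Plain tokenisation splits the long-format cycle point at its last colon,
# treating '30' as a cycle selector.
raw = tokenise(id_)
print({k: v for k, v in raw.items() if v is not None})
# {'workflow': 'my_workflow', 'cycle': '2024-01-01T00:00:00+05',
#  'cycle_sel': '30', 'task': 'my_task'}

# cli_tokenise re-attaches the selector because the joined string parses as
# a valid ISO 8601 datetime.
fixed = cli_tokenise(id_)
print({k: v for k, v in fixed.items() if v is not None})
# {'workflow': 'my_workflow', 'cycle': '2024-01-01T00:00:00+05:30',
#  'task': 'my_task'}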

tests/unit/test_id.py

Lines changed: 1 addition & 1 deletion
@@ -186,7 +186,7 @@ def test_universal_id_matches_hierarchical(identifier):
         '//~',
         '//:',
         '//workflow//cycle',
-        '//task:task_sel:task_sel'
+        '//cycle/task:task_sel:task_sel'
     ]
 )
 def test_relative_id_illegal(identifier):

tests/unit/test_id_cli.py

Lines changed: 35 additions & 0 deletions
@@ -30,6 +30,7 @@
     _validate_constraint,
     _validate_workflow_ids,
     _validate_number,
+    cli_tokenise,
     parse_ids_async,
 )
 from cylc.flow.pathutil import get_cylc_run_dir
@@ -607,3 +608,37 @@ async def test_expand_workflow_tokens_impl_selector(no_scan):
     tokens = tokens.duplicate(workflow_sel='stopped')
     with pytest.raises(InputError):
         await _expand_workflow_tokens([tokens])
+
+
+@pytest.mark.parametrize('identifier, expected', [
+    (
+        '//2024-01-01T00:fail/a',
+        {'cycle': '2024-01-01T00', 'cycle_sel': 'fail', 'task': 'a'}
+    ),
+    (
+        '//2024-01-01T00:00Z/a',
+        {'cycle': '2024-01-01T00:00Z', 'task': 'a'}
+    ),
+    (
+        '//2024-01-01T00:00Z:fail/a',
+        {'cycle': '2024-01-01T00:00Z', 'cycle_sel': 'fail', 'task': 'a'}
+    ),
+    (
+        '//2024-01-01T00:00:00+05:30/a',
+        {'cycle': '2024-01-01T00:00:00+05:30', 'task': 'a'}
+    ),
+    (
+        '//2024-01-01T00:00:00+05:30:f/a',
+        {'cycle': '2024-01-01T00:00:00+05:30', 'cycle_sel': 'f', 'task': 'a'}
+    ),
+    (
+        # Nonsensical example, but whatever...
+        '//2024-01-01T00:00Z:00Z/a',
+        {'cycle': '2024-01-01T00:00Z', 'cycle_sel': '00Z', 'task': 'a'}
+    )
+])
+def test_iso_long_fmt(identifier, expected):
+    assert {
+        k: v for k, v in cli_tokenise(identifier).items()
+        if v is not None
+    } == expected
