Commit 4ca43c0

Merge branch 'main' into core-update
2 parents 1331902 + 723d234

File tree

2 files changed: +52 -11 lines changed

temporalio/workflow.py
tests/worker/test_workflow.py

temporalio/workflow.py

Lines changed: 28 additions & 8 deletions
@@ -500,6 +500,14 @@ class UpdateInfo:
     name: str
     """Update type name."""
 
+    @property
+    def _logger_details(self) -> Mapping[str, Any]:
+        """Data to be included in string appended to default logging output."""
+        return {
+            "update_id": self.id,
+            "update_name": self.name,
+        }
+
 
 class _Runtime(ABC):
     @staticmethod
@@ -1211,6 +1219,10 @@ class LoggerAdapter(logging.LoggerAdapter):
             use by others. Default is False.
         log_during_replay: Boolean for whether logs should occur during replay.
             Default is False.
+
+    Values added to ``extra`` are merged with the ``extra`` dictionary from a
+    logging call, with values from the logging call taking precedence. I.e. the
+    behavior is that of `merge_extra=True` in Python >= 3.13.
     """
 
     def __init__(
@@ -1232,20 +1244,28 @@ def process(
             or self.workflow_info_on_extra
             or self.full_workflow_info_on_extra
         ):
+            extra: Dict[str, Any] = {}
+            msg_extra: Dict[str, Any] = {}
             runtime = _Runtime.maybe_current()
             if runtime:
+                workflow_details = runtime.logger_details
                 if self.workflow_info_on_message:
-                    msg = f"{msg} ({runtime.logger_details})"
+                    msg_extra.update(workflow_details)
                 if self.workflow_info_on_extra:
-                    # Extra can be absent or None, this handles both
-                    extra = kwargs.get("extra", None) or {}
-                    extra["temporal_workflow"] = runtime.logger_details
-                    kwargs["extra"] = extra
+                    extra["temporal_workflow"] = workflow_details
                 if self.full_workflow_info_on_extra:
-                    # Extra can be absent or None, this handles both
-                    extra = kwargs.get("extra", None) or {}
                     extra["workflow_info"] = runtime.workflow_info()
-                    kwargs["extra"] = extra
+            update_info = current_update_info()
+            if update_info:
+                update_details = update_info._logger_details
+                if self.workflow_info_on_message:
+                    msg_extra.update(update_details)
+                if self.workflow_info_on_extra:
+                    extra.setdefault("temporal_workflow", {}).update(update_details)
+
+            kwargs["extra"] = {**extra, **(kwargs.get("extra") or {})}
+            if msg_extra:
+                msg = f"{msg} ({msg_extra})"
         return (msg, kwargs)
 
     def isEnabledFor(self, level: int) -> bool:
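
For orientation, a minimal sketch of how this adapter change shows up at the call site; the workflow and handler names below are illustrative, not part of this diff:

    from temporalio import workflow

    @workflow.defn
    class MyWorkflow:
        @workflow.run
        async def run(self) -> None:
            await workflow.wait_condition(lambda: False)

        @workflow.update
        def my_update(self, value: str) -> None:
            # When workflow_info_on_message / workflow_info_on_extra are enabled,
            # logs emitted inside an update handler get update_id/update_name
            # appended to the message and merged into the "temporal_workflow"
            # entry of the record's extra.
            workflow.logger.info(f"Got update: {value}")

Per the new docstring paragraph, an ``extra`` dict passed in the logging call itself is merged last, so call-site values win on key collisions (the `merge_extra=True` behavior of Python >= 3.13).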

tests/worker/test_workflow.py

Lines changed: 24 additions & 3 deletions
@@ -1910,6 +1910,10 @@ def my_signal(self, value: str) -> None:
         self._last_signal = value
         workflow.logger.info(f"Signal: {value}")
 
+    @workflow.update
+    def my_update(self, value: str) -> None:
+        workflow.logger.info(f"Update: {value}")
+
     @workflow.query
     def last_signal(self) -> str:
         return self._last_signal
@@ -1957,14 +1961,22 @@ async def test_workflow_logging(client: Client, env: WorkflowEnvironment):
                 id=f"workflow-{uuid.uuid4()}",
                 task_queue=worker.task_queue,
             )
-            # Send a couple signals
+            # Send some signals and updates
             await handle.signal(LoggingWorkflow.my_signal, "signal 1")
             await handle.signal(LoggingWorkflow.my_signal, "signal 2")
+            await handle.execute_update(
+                LoggingWorkflow.my_update, "update 1", id="update-1"
+            )
+            await handle.execute_update(
+                LoggingWorkflow.my_update, "update 2", id="update-2"
+            )
             assert "signal 2" == await handle.query(LoggingWorkflow.last_signal)
 
-        # Confirm two logs happened
+        # Confirm logs were produced
         assert capturer.find_log("Signal: signal 1 ({'attempt':")
         assert capturer.find_log("Signal: signal 2")
+        assert capturer.find_log("Update: update 1")
+        assert capturer.find_log("Update: update 2")
         assert not capturer.find_log("Signal: signal 3")
         # Also make sure it has some workflow info and correct funcName
         record = capturer.find_log("Signal: signal 1")
@@ -1976,6 +1988,15 @@ async def test_workflow_logging(client: Client, env: WorkflowEnvironment):
         )
         # Since we enabled full info, make sure it's there
         assert isinstance(record.__dict__["workflow_info"], workflow.Info)
+        # Check the log emitted by the update execution.
+        record = capturer.find_log("Update: update 1")
+        assert (
+            record
+            and record.__dict__["temporal_workflow"]["update_id"] == "update-1"
+            and record.__dict__["temporal_workflow"]["update_name"] == "my_update"
+            and "'update_id': 'update-1'" in record.message
+            and "'update_name': 'my_update'" in record.message
+        )
 
         # Clear queue and start a new one with more signals
         capturer.log_queue.queue.clear()
@@ -1985,7 +2006,7 @@ async def test_workflow_logging(client: Client, env: WorkflowEnvironment):
             task_queue=worker.task_queue,
             max_cached_workflows=0,
         ) as worker:
-            # Send a couple signals
+            # Send signals and updates
            await handle.signal(LoggingWorkflow.my_signal, "signal 3")
            await handle.signal(LoggingWorkflow.my_signal, "finish")
            await handle.result()
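
A hedged sketch of how downstream logging code might consume the structured details this test exercises; the handler class and its wiring are illustrative, not part of this diff:

    import logging

    class TemporalDetailsHandler(logging.Handler):
        def emit(self, record: logging.LogRecord) -> None:
            # With workflow_info_on_extra enabled, the adapter attaches workflow
            # details -- and, inside an update handler, update_id/update_name --
            # under the "temporal_workflow" key on the record.
            details = getattr(record, "temporal_workflow", {})
            print(record.getMessage(), details.get("update_id"), details.get("update_name"))

    # Example wiring via the standard LoggerAdapter attribute for the wrapped logger:
    #     workflow.logger.logger.addHandler(TemporalDetailsHandler())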
