40 changes: 30 additions & 10 deletions turbopack/crates/turbo-tasks-backend/src/kv_backing_storage.rs
@@ -357,8 +357,12 @@ impl<T: KeyValueDatabase + Send + Sync + 'static> BackingStorageSealed
 
         let mut task_type_bytes = Vec::new();
         for (task_type, task_id) in updates {
-            let task_id: u32 = *task_id;
-            serialize_task_type(&task_type, &mut task_type_bytes, task_id)?;
+            serialize_task_type(
+                &task_type,
+                &mut task_type_bytes,
+                Some(task_id),
+            )?;
 
             batch
                 .put(
@@ -441,8 +445,8 @@ impl<T: KeyValueDatabase + Send + Sync + 'static> BackingStorageSealed
             .entered();
         let mut task_type_bytes = Vec::new();
         for (task_type, task_id) in task_cache_updates.into_iter().flatten() {
-            let task_id = *task_id;
-            serialize_task_type(&task_type, &mut task_type_bytes, task_id)?;
+            serialize_task_type(&task_type, &mut task_type_bytes, Some(task_id))?;
 
             batch
                 .put(
@@ -499,8 +503,10 @@ impl<T: KeyValueDatabase + Send + Sync + 'static> BackingStorageSealed
         tx: &D::ReadTransaction<'_>,
         task_type: &CachedTaskType,
     ) -> Result<Option<TaskId>> {
-        let task_type = POT_CONFIG.serialize(task_type)?;
-        let Some(bytes) = database.get(tx, KeySpace::ForwardTaskCache, &task_type)? else {
+        let mut task_type_bytes = Vec::new();
+        serialize_task_type(task_type, &mut task_type_bytes, None)?;
+        let Some(bytes) = database.get(tx, KeySpace::ForwardTaskCache, &task_type_bytes)?
+        else {
             return Ok(None);
         };
         let bytes = bytes.borrow().try_into()?;
@@ -645,23 +651,37 @@ where
     Ok(())
 }
 
+// DO NOT REMOVE THE `inline(never)` ATTRIBUTE!
+// `pot` uses the pointer address of `&'static str` to deduplicate Symbols.
+// If this function is inlined into multiple different callsites it might inline the Serialize
+// implementation too, which can pull a `&'static str` from another crate into this crate.
+// Since string deduplication between crates is not guaranteed, it can lead to behavior changes due
+// to the pointer addresses. This can lead to lookup path and store path creating different
+// serialization of the same task type, which breaks task cache lookups.
+#[inline(never)]
 fn serialize_task_type(
-    task_type: &Arc<CachedTaskType>,
+    task_type: &CachedTaskType,
     mut task_type_bytes: &mut Vec<u8>,
-    task_id: u32,
+    task_id: Option<TaskId>,
 ) -> Result<()> {
     task_type_bytes.clear();
     POT_CONFIG
-        .serialize_into(&**task_type, &mut task_type_bytes)
-        .with_context(|| anyhow!("Unable to serialize task {task_id} cache key {task_type:?}"))?;
+        .serialize_into(task_type, &mut task_type_bytes)
+        .with_context(|| {
+            if let Some(task_id) = task_id {
+                anyhow!("Unable to serialize task {task_id} cache key {task_type:?}")
+            } else {
+                anyhow!("Unable to serialize task cache key {task_type:?}")
+            }
+        })?;
     #[cfg(feature = "verify_serialization")]
     {
         let deserialize: Result<CachedTaskType, _> = serde_path_to_error::deserialize(
             &mut pot_de_symbol_list().deserializer_for_slice(&*task_type_bytes)?,
         );
         if let Err(err) = deserialize {
-            println!("Task type would not be deserializable {task_id}: {err:?}\n{task_type:#?}");
-            panic!("Task type would not be deserializable {task_id}: {err:?}");
+            println!("Task type would not be deserializable {task_id:?}: {err:?}\n{task_type:#?}");
+            panic!("Task type would not be deserializable {task_id:?}: {err:?}");
         }
     }
     Ok(())
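
The new comment on serialize_task_type is the heart of the change: pot deduplicates Symbols by the pointer address of &'static str values, so two callsites that end up with different copies of the "same" literal can serialize the same CachedTaskType to different bytes, and a byte-for-byte key mismatch breaks the ForwardTaskCache lookup. Below is a minimal, self-contained sketch (hypothetical code, not from this PR) of the underlying Rust behavior: string literals with equal contents are not guaranteed to share one address, which is why address-based deduplication is only safe when every caller goes through a single non-inlined serialization helper.

// Hypothetical standalone example: equal `&'static str` contents do not
// guarantee a shared address, so pointer-based symbol deduplication can
// behave differently at different callsites or across crates.
fn main() {
    let a: &'static str = "output";
    // `concat!` produces an equal string, but possibly a distinct literal.
    let b: &'static str = concat!("out", "put");

    // The contents compare equal...
    assert_eq!(a, b);

    // ...but address equality is an implementation detail of the compiler and
    // linker; this may print true or false. A serializer that keys its symbol
    // table off the address can therefore emit different bytes for the same
    // value, which is why the PR routes both the store path and the lookup
    // path through the single #[inline(never)] serialize_task_type helper.
    println!("same address: {}", a.as_ptr() == b.as_ptr());
}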