1 change: 1 addition & 0 deletions test/e2e/app-dir/no-double-tailwind-execution/app/globals.css
@@ -0,0 +1 @@
@import 'tailwindcss';
10 changes: 10 additions & 0 deletions test/e2e/app-dir/no-double-tailwind-execution/app/layout.tsx
@@ -0,0 +1,10 @@
import './globals.css'

import { ReactNode } from 'react'
export default function Root({ children }: { children: ReactNode }) {
return (
<html>
<body>{children}</body>
</html>
)
}
3 changes: 3 additions & 0 deletions test/e2e/app-dir/no-double-tailwind-execution/app/page.tsx
@@ -0,0 +1,3 @@
export default function Page() {
return <p>hello world</p>
}
6 changes: 6 additions & 0 deletions test/e2e/app-dir/no-double-tailwind-execution/next.config.js
@@ -0,0 +1,6 @@
/**
* @type {import('next').NextConfig}
*/
const nextConfig = {}

module.exports = nextConfig
@@ -0,0 +1,55 @@
import { nextTestSetup } from 'e2e-utils'
import { retry } from 'next-test-utils'

describe('no-double-tailwind-execution', () => {
const { next, isNextDev, skipped } = nextTestSetup({
files: __dirname,
skipDeployment: true,
dependencies: {
'@tailwindcss/postcss': '^4',
tailwindcss: '^4',
},
env: {
DEBUG: 'tailwindcss',
...process.env,
},
})

if (skipped) {
return
}

it('should run tailwind only once initially and per change', async () => {
const browser = await next.browser('/')
expect(await browser.elementByCss('p').text()).toBe('hello world')

if (isNextDev) {
const filePath = 'app/page.tsx'
const origContent = await next.readFile(filePath)
let getOutput = next.getCliOutputFromHere()
await next.patchFile(
filePath,
origContent.replace('hello world', 'hello hmr'),
async () => {
await retry(async () => {
expect(await browser.elementByCss('p').text()).toBe('hello hmr')
let tailwindProcessingCount = [
...getOutput().matchAll(
/\[@tailwindcss\/postcss\] app\/globals.css/g
),
].length
expect(tailwindProcessingCount).toBe(1)
})
}
)
}
let tailwindProcessingCount = [
...next.cliOutput.matchAll(/\[@tailwindcss\/postcss\] app\/globals.css/g),
].length
if (isNextDev) {
expect(tailwindProcessingCount).toBe(3) // dev: initial + hmr + hmr (revert)
} else {
expect(tailwindProcessingCount).toBe(1) // build
}
})
})
1 change: 1 addition & 0 deletions test/e2e/app-dir/no-double-tailwind-execution/package.json
@@ -0,0 +1 @@
{}
@@ -0,0 +1,5 @@
const config = {
plugins: ['@tailwindcss/postcss'],
}

export default config
@@ -601,6 +601,7 @@ pub enum AnyOperation {
ConnectChild(connect_child::ConnectChildOperation),
Invalidate(invalidate::InvalidateOperation),
UpdateOutput(update_output::UpdateOutputOperation),
UpdateCell(update_cell::UpdateCellOperation),
CleanupOldEdges(cleanup_old_edges::CleanupOldEdgesOperation),
AggregationUpdate(aggregation_update::AggregationUpdateQueue),
Nested(Vec<AnyOperation>),
@@ -612,6 +613,7 @@ impl AnyOperation {
AnyOperation::ConnectChild(op) => op.execute(ctx),
AnyOperation::Invalidate(op) => op.execute(ctx),
AnyOperation::UpdateOutput(op) => op.execute(ctx),
AnyOperation::UpdateCell(op) => op.execute(ctx),
AnyOperation::CleanupOldEdges(op) => op.execute(ctx),
AnyOperation::AggregationUpdate(op) => op.execute(ctx),
AnyOperation::Nested(ops) => {
@@ -626,6 +628,7 @@
impl_operation!(ConnectChild connect_child::ConnectChildOperation);
impl_operation!(Invalidate invalidate::InvalidateOperation);
impl_operation!(UpdateOutput update_output::UpdateOutputOperation);
impl_operation!(UpdateCell update_cell::UpdateCellOperation);
impl_operation!(CleanupOldEdges cleanup_old_edges::CleanupOldEdgesOperation);
impl_operation!(AggregationUpdate aggregation_update::AggregationUpdateQueue);

@@ -639,6 +642,5 @@ pub use self::{
cleanup_old_edges::OutdatedEdge,
connect_children::connect_children,
prepare_new_children::prepare_new_children,
update_cell::UpdateCellOperation,
update_collectible::UpdateCollectibleOperation,
};
@@ -1,17 +1,37 @@
use std::mem::take;

use serde::{Deserialize, Serialize};
use smallvec::SmallVec;
use turbo_tasks::{CellId, TaskId, backend::CellContent};

#[cfg(feature = "trace_task_dirty")]
use crate::backend::operation::invalidate::TaskDirtyCause;
use crate::{
backend::{
TaskDataCategory,
operation::{ExecuteContext, InvalidateOperation, TaskGuard},
operation::{
AggregationUpdateQueue, ExecuteContext, Operation, TaskGuard,
invalidate::make_task_dirty_internal,
},
storage::{get_many, remove},
},
data::{CachedDataItem, CachedDataItemKey},
data::{CachedDataItem, CachedDataItemKey, CellRef},
};

pub struct UpdateCellOperation;
#[derive(Serialize, Deserialize, Clone, Default)]
#[allow(clippy::large_enum_variant)]
pub enum UpdateCellOperation {
InvalidateWhenCellDependency {
cell_ref: CellRef,
dependent_tasks: SmallVec<[TaskId; 4]>,
queue: AggregationUpdateQueue,
},
AggregationUpdate {
queue: AggregationUpdateQueue,
},
#[default]
Done,
}

impl UpdateCellOperation {
pub fn run(task_id: TaskId, cell: CellId, content: CellContent, mut ctx: impl ExecuteContext) {
@@ -39,7 +59,7 @@ impl UpdateCellOperation {
// This is a hack for the streaming hack. Stateful tasks are never recomputed, so this forces invalidation for them in case of this hack.
task.has_key(&CachedDataItemKey::Stateful {}))
{
let dependent = get_many!(
let dependent_tasks = get_many!(
task,
CellDependent { cell: dependent_cell, task }
if dependent_cell == cell
@@ -49,17 +69,78 @@
drop(task);
drop(old_content);

InvalidateOperation::run(
dependent,
#[cfg(feature = "trace_task_dirty")]
TaskDirtyCause::CellChange {
value_type: cell.type_id,
UpdateCellOperation::InvalidateWhenCellDependency {
cell_ref: CellRef {
task: task_id,
cell,
},
ctx,
);
dependent_tasks,
queue: AggregationUpdateQueue::new(),
}
.execute(&mut ctx);
} else {
drop(task);
drop(old_content);
}
}
}

impl Operation for UpdateCellOperation {
fn execute(mut self, ctx: &mut impl ExecuteContext) {
loop {
ctx.operation_suspend_point(&self);
match self {
UpdateCellOperation::InvalidateWhenCellDependency {
cell_ref,
ref mut dependent_tasks,
ref mut queue,
} => {
if let Some(dependent_task_id) = dependent_tasks.pop() {
if ctx.is_once_task(dependent_task_id) {
// once tasks are never invalidated
continue;
}
let dependent = ctx.task(dependent_task_id, TaskDataCategory::All);
if dependent.has_key(&CachedDataItemKey::OutdatedCellDependency {
target: cell_ref,
}) {
// cell dependency is outdated, so it hasn't read the cell yet
// and doesn't need to be invalidated
continue;
}
if !dependent
.has_key(&CachedDataItemKey::CellDependency { target: cell_ref })
{
// cell dependency has been removed, so the task doesn't depend on
// the cell anymore and doesn't need to be invalidated
continue;
}
make_task_dirty_internal(
dependent,
dependent_task_id,
true,
#[cfg(feature = "trace_task_dirty")]
TaskDirtyCause::CellChange {
value_type: cell_ref.cell.type_id,
},
queue,
ctx,
);
}
if dependent_tasks.is_empty() {
self = UpdateCellOperation::AggregationUpdate { queue: take(queue) };
}
}
UpdateCellOperation::AggregationUpdate { ref mut queue } => {
if queue.process(ctx) {
self = UpdateCellOperation::Done
}
}
UpdateCellOperation::Done => {
return;
}
}
}
}
}
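
For orientation, the sketch below illustrates the resumable state-machine pattern that the rewritten UpdateCellOperation above (and UpdateOutputOperation below) follow: the operation is an enum of phases, `execute` loops over a `match`, calls `operation_suspend_point` on every iteration so the backend can persist and later resume the operation, and transitions between variants until it reaches `Done`. The `ExecuteContext` trait, `Ctx` type, and `DemoOperation` enum here are simplified stand-ins for illustration only, not the actual turbo-tasks-backend API.

```rust
use std::mem::take;

trait ExecuteContext {
    // Stand-in for the real suspend point, which persists the serialized
    // operation state so it can be resumed after an interruption.
    fn operation_suspend_point<T>(&mut self, _state: &T) {}
}

struct Ctx;
impl ExecuteContext for Ctx {}

enum DemoOperation {
    // Pop dependent tasks one by one, collecting follow-up work in `queue`.
    InvalidateDependents {
        dependent_tasks: Vec<u32>,
        queue: Vec<u32>,
    },
    // Drain the queued follow-up work.
    ProcessQueue { queue: Vec<u32> },
    Done,
}

impl DemoOperation {
    fn execute(mut self, ctx: &mut impl ExecuteContext) {
        loop {
            // Each iteration is a checkpoint the backend could resume from.
            ctx.operation_suspend_point(&self);
            match self {
                DemoOperation::InvalidateDependents {
                    ref mut dependent_tasks,
                    ref mut queue,
                } => {
                    if let Some(task) = dependent_tasks.pop() {
                        // In the real operation this is where the dependency
                        // checks and make_task_dirty_internal happen.
                        queue.push(task);
                        continue;
                    }
                    self = DemoOperation::ProcessQueue { queue: take(queue) };
                }
                DemoOperation::ProcessQueue { ref mut queue } => {
                    if queue.pop().is_none() {
                        self = DemoOperation::Done;
                    }
                }
                DemoOperation::Done => return,
            }
        }
    }
}

fn main() {
    DemoOperation::InvalidateDependents {
        dependent_tasks: vec![1, 2, 3],
        queue: Vec::new(),
    }
    .execute(&mut Ctx);
}
```

Compared with the previous code path, which handed the dependent list to `InvalidateOperation::run`, inlining the checks lets the operation skip dependents whose cell dependency is outdated or already removed before marking them dirty.
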
@@ -12,7 +12,7 @@ use crate::{
TaskDataCategory,
operation::{
AggregationUpdateQueue, ExecuteContext, Operation, TaskGuard,
invalidate::{make_task_dirty, make_task_dirty_internal},
invalidate::make_task_dirty_internal,
},
storage::{get, get_many},
},
@@ -25,7 +25,6 @@
#[derive(Serialize, Deserialize, Clone, Default)]
pub enum UpdateOutputOperation {
MakeDependentTasksDirty {
#[cfg(feature = "trace_task_dirty")]
task_id: TaskId,
dependent_tasks: SmallVec<[TaskId; 4]>,
children: SmallVec<[TaskId; 4]>,
@@ -132,7 +131,6 @@ impl UpdateOutputOperation {
}

UpdateOutputOperation::MakeDependentTasksDirty {
#[cfg(feature = "trace_task_dirty")]
task_id,
dependent_tasks,
children,
@@ -148,15 +146,35 @@ impl Operation for UpdateOutputOperation {
ctx.operation_suspend_point(&self);
match self {
UpdateOutputOperation::MakeDependentTasksDirty {
#[cfg(feature = "trace_task_dirty")]
task_id,
ref mut dependent_tasks,
ref mut children,
ref mut queue,
} => {
if let Some(dependent_task_id) = dependent_tasks.pop() {
make_task_dirty(
if ctx.is_once_task(dependent_task_id) {
// once tasks are never invalidated
continue;
}
let dependent = ctx.task(dependent_task_id, TaskDataCategory::All);
if dependent.has_key(&CachedDataItemKey::OutdatedOutputDependency {
target: task_id,
}) {
// output dependency is outdated, so it hasn't read the output yet
// and doesn't need to be invalidated
continue;
}
if !dependent
.has_key(&CachedDataItemKey::OutputDependency { target: task_id })
{
// output dependency has been removed, so the task doesn't depend on the
// output anymore and doesn't need to be invalidated
continue;
}
make_task_dirty_internal(
dependent,
dependent_task_id,
true,
#[cfg(feature = "trace_task_dirty")]
TaskDirtyCause::OutputChange { task_id },
queue,